Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-05 23:35:24 +01:00

Compare commits (61 commits)
Commits in this comparison (author and date columns were not captured by the mirror):

a002e9b12f
db64a9ebfa
3fbde22cc4
758e60a58d
5201410e39
b1b7944012
8ef04268be
b107e87d38
b0b9579778
7eade4029a
3d4913348a
1720b71d62
589086f0d0
6f8060dea7
b8ef87d84c
7370b0ffc6
209cf44e8e
b7dacabbe4
5098c35814
896df60f32
b26ab2849b
36f52a26f6
f0295d562d
081bd22e59
668c73010a
fe371f0438
be28dc0bca
4578baca3e
6d7dfc0804
c5cf95c14b
f79ce7bd04
578c1b283a
5ae0844f35
8e3e2d436e
4af6804c50
1333e23c23
b572c0df7f
139a0ca008
0458b5e3a6
c91c9f2b40
5165d746aa
6cadc5b157
d32e3dc31a
807bfb71df
c2f16cf21d
9ca7efbe4c
a5dec53e1b
f1de903fb5
fa07787007
0d27e10d77
8dee5f4cf8
c6a7350db1
2a68952334
7f801faed1
02600bf190
f41d283354
f34e831650
c9ec533aa5
2a879afc7a
71c2557405
214fc04473
@@ -589,7 +589,6 @@ omit =
homeassistant/components/nut/sensor.py
homeassistant/components/nx584/alarm_control_panel.py
homeassistant/components/nzbget/coordinator.py
homeassistant/components/nzbget/sensor.py
homeassistant/components/obihai/*
homeassistant/components/octoprint/*
homeassistant/components/oem/climate.py
@@ -760,6 +759,7 @@ omit =
homeassistant/components/shodan/sensor.py
homeassistant/components/shelly/__init__.py
homeassistant/components/shelly/binary_sensor.py
homeassistant/components/shelly/cover.py
homeassistant/components/shelly/entity.py
homeassistant/components/shelly/light.py
homeassistant/components/shelly/sensor.py
@@ -253,7 +253,7 @@ homeassistant/components/met/* @danielhiversen @thimic
homeassistant/components/meteo_france/* @hacf-fr @oncleben31 @Quentame
homeassistant/components/meteoalarm/* @rolfberkenbosch
homeassistant/components/metoffice/* @MrHarcombe
homeassistant/components/miflora/* @danielhiversen @ChristianKuehnel
homeassistant/components/miflora/* @danielhiversen @ChristianKuehnel @basnijholt
homeassistant/components/mikrotik/* @engrbm87
homeassistant/components/mill/* @danielhiversen
homeassistant/components/min_max/* @fabaff
@@ -49,6 +49,7 @@ jobs:
builderVersion: '$(versionWheels)'
builderApk: 'build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev'
builderPip: 'Cython;numpy;scikit-build'
builderEnvFile: true
skipBinary: 'aiohttp'
wheelsRequirement: 'requirements_wheels.txt'
wheelsRequirementDiff: 'requirements_diff.txt'
@@ -90,4 +91,10 @@ jobs:
sed -i "s|# bme680|bme680|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}
done

# Write env for build settings
(
echo "GRPC_BUILD_WITH_BORING_SSL_ASM=0"
echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1"
) > .env_file
displayName: 'Prepare requirements files for Home Assistant wheels'
@@ -10,7 +10,8 @@
"abort": {
"already_setup": "You can only configure one Almond account.",
"cannot_connect": "Unable to connect to the Almond server.",
"missing_configuration": "Please check the documentation on how to set up Almond."
"missing_configuration": "Please check the documentation on how to set up Almond.",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
}
}
}
@@ -7,6 +7,8 @@ from arcam.fmj.state import State
from homeassistant import config_entries
from homeassistant.components.media_player import BrowseMedia, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_MUSIC,
MEDIA_TYPE_MUSIC,
SUPPORT_BROWSE_MEDIA,
SUPPORT_PLAY_MEDIA,
@@ -255,6 +257,7 @@ class ArcamFmj(MediaPlayerEntity):
radio = [
BrowseMedia(
title=preset.name,
media_class=MEDIA_CLASS_MUSIC,
media_content_id=f"preset:{preset.index}",
media_content_type=MEDIA_TYPE_MUSIC,
can_play=True,
@@ -265,6 +268,7 @@ class ArcamFmj(MediaPlayerEntity):

root = BrowseMedia(
title="Root",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="root",
media_content_type="library",
can_play=False,
@@ -13,6 +13,7 @@ from homeassistant.const import (
CONF_ID,
CONF_MODE,
CONF_PLATFORM,
CONF_VARIABLES,
CONF_ZONE,
EVENT_HOMEASSISTANT_STARTED,
SERVICE_RELOAD,
@@ -29,7 +30,7 @@ from homeassistant.core import (
split_entity_id,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import condition, extract_domain_configs
from homeassistant.helpers import condition, extract_domain_configs, template
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
@@ -44,6 +45,7 @@ from homeassistant.helpers.script import (
Script,
make_script_schema,
)
from homeassistant.helpers.script_variables import ScriptVariables
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.trigger import async_initialize_triggers
from homeassistant.helpers.typing import TemplateVarsType
@@ -104,6 +106,7 @@ PLATFORM_SCHEMA = vol.All(
vol.Optional(CONF_HIDE_ENTITY): cv.boolean,
vol.Required(CONF_TRIGGER): cv.TRIGGER_SCHEMA,
vol.Optional(CONF_CONDITION): _CONDITION_SCHEMA,
vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA,
vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA,
},
SCRIPT_MODE_SINGLE,
@@ -239,6 +242,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
cond_func,
action_script,
initial_state,
variables,
):
"""Initialize an automation entity."""
self._id = automation_id
@@ -253,6 +257,7 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
self._referenced_entities: Optional[Set[str]] = None
self._referenced_devices: Optional[Set[str]] = None
self._logger = _LOGGER
self._variables: ScriptVariables = variables

@property
def name(self):
@@ -378,11 +383,20 @@ class AutomationEntity(ToggleEntity, RestoreEntity):
else:
await self.async_disable()

async def async_trigger(self, variables, context=None, skip_condition=False):
async def async_trigger(self, run_variables, context=None, skip_condition=False):
"""Trigger automation.

This method is a coroutine.
"""
if self._variables:
try:
variables = self._variables.async_render(self.hass, run_variables)
except template.TemplateError as err:
self._logger.error("Error rendering variables: %s", err)
return
else:
variables = run_variables

if (
not skip_condition
and self._cond_func is not None
@@ -518,6 +532,9 @@ async def _async_process_config(hass, config, component):
max_runs=config_block[CONF_MAX],
max_exceeded=config_block[CONF_MAX_EXCEEDED],
logger=_LOGGER,
# We don't pass variables here
# Automation will already render them to use them in the condition
# and so will pass them on to the script.
)

if CONF_CONDITION in config_block:
@@ -535,6 +552,7 @@ async def _async_process_config(hass, config, component):
cond_func,
action_script,
initial_state,
config_block.get(CONF_VARIABLES),
)

entities.append(entity)
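A short illustrative sketch of the behavior added above: the new `variables` block is validated by `cv.SCRIPT_VARIABLES_SCHEMA` into a `ScriptVariables` object, and at trigger time it is rendered against the trigger's `run_variables` before being handed to the condition and the action script. This is not code from the PR; it assumes it runs inside a Home Assistant instance where `hass` is available, and the `greeting` variable is made up for the example.

```python
# Hypothetical usage sketch (requires a running Home Assistant `hass`):
import homeassistant.helpers.config_validation as cv

# Validation turns the raw mapping into a ScriptVariables object.
variables = cv.SCRIPT_VARIABLES_SCHEMA({"greeting": "Hello {{ trigger.platform }}"})

# At trigger time the automation renders the templates against run_variables
# (no await: async_render is a callback, as in the diff above).
rendered = variables.async_render(hass, {"trigger": {"platform": "state"}})
# `rendered` is what the condition and the action script now receive instead
# of the raw run_variables.
```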
@@ -4,7 +4,11 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/axis",
"requirements": ["axis==35"],
"zeroconf": ["_axis-video._tcp.local."],
"zeroconf": [
{"type":"_axis-video._tcp.local.","macaddress":"00408C*"},
{"type":"_axis-video._tcp.local.","macaddress":"ACCC8E*"},
{"type":"_axis-video._tcp.local.","macaddress":"B8A44F*"}
],
"after_dependencies": ["mqtt"],
"codeowners": ["@Kane610"]
}
@@ -1 +1,4 @@
"""The bayesian component."""

DOMAIN = "bayesian"
PLATFORMS = ["binary_sensor"]
@@ -25,8 +25,11 @@ from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_template_result,
)
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.template import result_as_boolean

from . import DOMAIN, PLATFORMS

ATTR_OBSERVATIONS = "observations"
ATTR_OCCURRED_OBSERVATION_ENTITIES = "occurred_observation_entities"
ATTR_PROBABILITY = "probability"
@@ -106,6 +109,8 @@ def update_probability(prior, prob_given_true, prob_given_false):

async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Bayesian Binary sensor."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)

name = config[CONF_NAME]
observations = config[CONF_OBSERVATIONS]
prior = config[CONF_PRIOR]
homeassistant/components/bayesian/services.yaml (new file, 2 lines)
@@ -0,0 +1,2 @@
reload:
description: Reload all bayesian entities.
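A minimal sketch of what the new service enables, assuming it runs inside a Home Assistant instance (for example a test or script where `hass` is available): the reload service re-reads the bayesian YAML configuration without restarting Home Assistant.

```python
# Hypothetical call site; "bayesian" / "reload" come from the file above.
await hass.services.async_call("bayesian", "reload", blocking=True)
```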
@@ -4,7 +4,7 @@
"documentation": "https://www.home-assistant.io/integrations/brother",
"codeowners": ["@bieniu"],
"requirements": ["brother==0.1.17"],
"zeroconf": ["_printer._tcp.local."],
"zeroconf": [{"type": "_printer._tcp.local.", "name":"brother*"}],
"config_flow": true,
"quality_scale": "platinum"
}
@@ -2,7 +2,7 @@
"domain": "cloud",
"name": "Home Assistant Cloud",
"documentation": "https://www.home-assistant.io/integrations/cloud",
"requirements": ["hass-nabucasa==0.36.1"],
"requirements": ["hass-nabucasa==0.37.0"],
"dependencies": ["http", "webhook", "alexa"],
"after_dependencies": ["google_assistant"],
"codeowners": ["@home-assistant/cloud"]
@@ -3,5 +3,5 @@
"name": "De Lijn",
"documentation": "https://www.home-assistant.io/integrations/delijn",
"codeowners": ["@bollewolle", "@Emilv2"],
"requirements": ["pydelijn==0.6.0"]
"requirements": ["pydelijn==0.6.1"]
}
@@ -4,7 +4,7 @@
"documentation": "https://www.home-assistant.io/integrations/doorbird",
"requirements": ["doorbirdpy==2.1.0"],
"dependencies": ["http"],
"zeroconf": ["_axis-video._tcp.local."],
"zeroconf": [{"type":"_axis-video._tcp.local.","macaddress":"1CCAE3*"}],
"codeowners": ["@oblogic7", "@bdraco"],
"config_flow": true
}
@@ -185,7 +185,9 @@ def request_app_setup(hass, config, add_entities, config_path, discovery_info=No
else:
setup_platform(hass, config, add_entities, discovery_info)

start_url = f"{get_url(hass)}{FITBIT_AUTH_CALLBACK_PATH}"
start_url = (
f"{get_url(hass, require_current_request=True)}{FITBIT_AUTH_CALLBACK_PATH}"
)

description = f"""Please create a Fitbit developer app at
https://dev.fitbit.com/apps/new.
@@ -220,7 +222,7 @@ def request_oauth_completion(hass):
def fitbit_configuration_callback(callback_data):
"""Handle configuration updates."""

start_url = f"{get_url(hass)}{FITBIT_AUTH_START}"
start_url = f"{get_url(hass, require_current_request=True)}{FITBIT_AUTH_START}"

description = f"Please authorize Fitbit by visiting {start_url}"

@@ -312,7 +314,9 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
config_file.get(CONF_CLIENT_ID), config_file.get(CONF_CLIENT_SECRET)
)

redirect_uri = f"{get_url(hass)}{FITBIT_AUTH_CALLBACK_PATH}"
redirect_uri = (
f"{get_url(hass, require_current_request=True)}{FITBIT_AUTH_CALLBACK_PATH}"
)

fitbit_auth_start_url, _ = oauth.authorize_token_url(
redirect_uri=redirect_uri,
@@ -357,7 +361,7 @@ class FitbitAuthCallbackView(HomeAssistantView):

result = None
if data.get("code") is not None:
redirect_uri = f"{get_url(hass)}{FITBIT_AUTH_CALLBACK_PATH}"
redirect_uri = f"{get_url(hass, require_current_request=True)}{FITBIT_AUTH_CALLBACK_PATH}"

try:
result = self.oauth.fetch_access_token(data.get("code"), redirect_uri)
@@ -70,8 +70,6 @@ MANIFEST_JSON = {

DATA_PANELS = "frontend_panels"
DATA_JS_VERSION = "frontend_js_version"
DATA_EXTRA_HTML_URL = "frontend_extra_html_url"
DATA_EXTRA_HTML_URL_ES5 = "frontend_extra_html_url_es5"
DATA_EXTRA_MODULE_URL = "frontend_extra_module_url"
DATA_EXTRA_JS_URL_ES5 = "frontend_extra_js_url_es5"

@@ -91,29 +89,23 @@ _LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(CONF_EXTRA_HTML_URL, invalidation_version="0.115"),
cv.deprecated(CONF_EXTRA_HTML_URL_ES5, invalidation_version="0.115"),
vol.Schema(
{
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
vol.Optional(CONF_THEMES): vol.Schema(
{cv.string: {cv.string: cv.string}}
),
vol.Optional(CONF_EXTRA_HTML_URL): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXTRA_JS_URL_ES5): vol.All(
cv.ensure_list, [cv.string]
),
# We no longer use these options.
vol.Optional(CONF_EXTRA_HTML_URL_ES5): cv.match_all,
vol.Optional(CONF_JS_VERSION): cv.match_all,
},
),
DOMAIN: vol.Schema(
{
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
vol.Optional(CONF_THEMES): vol.Schema(
{cv.string: {cv.string: cv.string}}
),
vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXTRA_JS_URL_ES5): vol.All(
cv.ensure_list, [cv.string]
),
# We no longer use these options.
vol.Optional(CONF_EXTRA_HTML_URL): cv.match_all,
vol.Optional(CONF_EXTRA_HTML_URL_ES5): cv.match_all,
vol.Optional(CONF_JS_VERSION): cv.match_all,
},
)
},
extra=vol.ALLOW_EXTRA,
@@ -220,17 +212,6 @@ def async_remove_panel(hass, frontend_url_path):
hass.bus.async_fire(EVENT_PANELS_UPDATED)


@bind_hass
@callback
def add_extra_html_url(hass, url, es5=False):
"""Register extra html url to load."""
key = DATA_EXTRA_HTML_URL_ES5 if es5 else DATA_EXTRA_HTML_URL
url_set = hass.data.get(key)
if url_set is None:
url_set = hass.data[key] = set()
url_set.add(url)


def add_extra_js_url(hass, url, es5=False):
"""Register extra js or module url to load."""
key = DATA_EXTRA_JS_URL_ES5 if es5 else DATA_EXTRA_MODULE_URL
@@ -267,6 +248,13 @@ async def async_setup(hass, config):

conf = config.get(DOMAIN, {})

for key in (CONF_EXTRA_HTML_URL, CONF_EXTRA_HTML_URL_ES5, CONF_JS_VERSION):
if key in conf:
_LOGGER.error(
"Please remove %s from your frontend config. It is no longer supported",
key,
)

repo_path = conf.get(CONF_FRONTEND_REPO)
is_dev = repo_path is not None
root_path = _frontend_root(repo_path)
@@ -315,12 +303,6 @@ async def async_setup(hass, config):
sidebar_icon="hass:hammer",
)

if DATA_EXTRA_HTML_URL not in hass.data:
hass.data[DATA_EXTRA_HTML_URL] = set()

for url in conf.get(CONF_EXTRA_HTML_URL, []):
add_extra_html_url(hass, url, False)

if DATA_EXTRA_MODULE_URL not in hass.data:
hass.data[DATA_EXTRA_MODULE_URL] = set()

@@ -522,7 +504,6 @@ class IndexView(web_urldispatcher.AbstractResource):
return web.Response(
text=template.render(
theme_color=MANIFEST_JSON["theme_color"],
extra_urls=hass.data[DATA_EXTRA_HTML_URL],
extra_modules=hass.data[DATA_EXTRA_MODULE_URL],
extra_js_es5=hass.data[DATA_EXTRA_JS_URL_ES5],
),
@@ -2,7 +2,7 @@
"domain": "frontend",
"name": "Home Assistant Frontend",
"documentation": "https://www.home-assistant.io/integrations/frontend",
"requirements": ["home-assistant-frontend==20200907.0"],
"requirements": ["home-assistant-frontend==20200909.0"],
"dependencies": [
"api",
"auth",
@@ -6,7 +6,8 @@
}
},
"abort": {
"missing_configuration": "The Home Connect component is not configured. Please follow the documentation."
"missing_configuration": "The Home Connect component is not configured. Please follow the documentation.",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
},
"create_entry": {
"default": "Successfully authenticated with Home Connect."
@@ -28,11 +28,15 @@ async def async_attach_trigger(
):
"""Listen for events based on configuration."""
event_type = config.get(CONF_EVENT_TYPE)
event_data_schema = (
vol.Schema(config.get(CONF_EVENT_DATA), extra=vol.ALLOW_EXTRA)
if config.get(CONF_EVENT_DATA)
else None
)
event_data_schema = None
if config.get(CONF_EVENT_DATA):
event_data_schema = vol.Schema(
{
vol.Required(key): value
for key, value in config.get(CONF_EVENT_DATA).items()
},
extra=vol.ALLOW_EXTRA,
)

@callback
def handle_event(event):
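A standalone sketch of the schema change above, using voluptuous directly. The configured event-data keys are now wrapped in `vol.Required`, so an event that lacks a configured key no longer matches, while extra keys are still allowed. The key names below are illustrative and not taken from the PR.

```python
import voluptuous as vol

configured = {"action": "pressed"}  # hypothetical configured event_data

old_schema = vol.Schema(configured, extra=vol.ALLOW_EXTRA)
new_schema = vol.Schema(
    {vol.Required(key): value for key, value in configured.items()},
    extra=vol.ALLOW_EXTRA,
)

new_schema({"action": "pressed", "device": "remote_1"})  # passes: extras allowed
old_schema({"device": "remote_1"})  # also passes: plain keys were optional

try:
    new_schema({"device": "remote_1"})  # now rejected: "action" is required
except vol.Invalid:
    pass
```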
@@ -80,6 +80,13 @@ async def async_attach_trigger(
else:
new_value = to_s.attributes.get(attribute)

# When we listen for state changes with `match_all`, we
# will trigger even if just an attribute changes. When
# we listen to just an attribute, we should ignore all
# other attribute changes.
if attribute is not None and old_value == new_value:
return

if (
not match_from_state(old_value)
or not match_to_state(new_value)
@@ -38,7 +38,7 @@ from homeassistant.helpers import device_registry, entity_registry
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import BASE_FILTER_SCHEMA, FILTER_SCHEMA
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.loader import async_get_integration
from homeassistant.loader import IntegrationNotFound, async_get_integration
from homeassistant.util import get_local_ip

from .accessories import get_accessory
@@ -712,8 +712,13 @@ class HomeKit:
if dev_reg_ent.sw_version:
ent_cfg[ATTR_SOFTWARE_VERSION] = dev_reg_ent.sw_version
if ATTR_MANUFACTURER not in ent_cfg:
integration = await async_get_integration(self.hass, ent_reg_ent.platform)
ent_cfg[ATTR_INTERGRATION] = integration.name
try:
integration = await async_get_integration(
self.hass, ent_reg_ent.platform
)
ent_cfg[ATTR_INTERGRATION] = integration.name
except IntegrationNotFound:
ent_cfg[ATTR_INTERGRATION] = ent_reg_ent.platform


class HomeKitPairingQRView(HomeAssistantView):
@@ -1,14 +1,11 @@
"""Support for the Philips Hue system."""
import ipaddress
import logging

from aiohue.util import normalize_bridge_id
import voluptuous as vol

from homeassistant import config_entries, core
from homeassistant.components import persistent_notification
from homeassistant.const import CONF_HOST
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers import device_registry as dr

from .bridge import HueBridge
from .const import (
@@ -21,80 +18,10 @@ from .const import (

_LOGGER = logging.getLogger(__name__)

CONF_BRIDGES = "bridges"

DATA_CONFIGS = "hue_configs"

PHUE_CONFIG_FILE = "phue.conf"

BRIDGE_CONFIG_SCHEMA = vol.Schema(
{
# Validate as IP address and then convert back to a string.
vol.Required(CONF_HOST): vol.All(ipaddress.ip_address, cv.string),
vol.Optional(CONF_ALLOW_UNREACHABLE): cv.boolean,
vol.Optional(CONF_ALLOW_HUE_GROUPS): cv.boolean,
vol.Optional("filename"): str,
}
)

CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN, invalidation_version="0.115.0"),
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_BRIDGES): vol.All(
cv.ensure_list,
[BRIDGE_CONFIG_SCHEMA],
)
}
)
},
),
extra=vol.ALLOW_EXTRA,
)


async def async_setup(hass, config):
"""Set up the Hue platform."""
conf = config.get(DOMAIN)
if conf is None:
conf = {}

hass.data[DOMAIN] = {}
hass.data[DATA_CONFIGS] = {}

# User has not configured bridges
if CONF_BRIDGES not in conf:
return True

bridges = conf[CONF_BRIDGES]

configured_hosts = {
entry.data.get("host") for entry in hass.config_entries.async_entries(DOMAIN)
}

for bridge_conf in bridges:
host = bridge_conf[CONF_HOST]

# Store config in hass.data so the config entry can find it
hass.data[DATA_CONFIGS][host] = bridge_conf

if host in configured_hosts:
continue

# No existing config entry found, trigger link config flow. Because we're
# inside the setup of this component we'll have to use hass.async_add_job
# to avoid a deadlock: creating a config entry will set up the component
# but the setup would block till the entry is created!
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": bridge_conf[CONF_HOST]},
)
)

return True


@@ -102,8 +29,6 @@ async def async_setup_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up a bridge from a config entry."""
host = entry.data["host"]
config = hass.data[DATA_CONFIGS].get(host)

# Migrate allow_unreachable from config entry data to config entry options
if (
@@ -133,27 +58,6 @@ async def async_setup_entry(
data.pop(CONF_ALLOW_HUE_GROUPS)
hass.config_entries.async_update_entry(entry, data=data, options=options)

# Overwrite from YAML configuration
if config is not None:
options = {}
if CONF_ALLOW_HUE_GROUPS in config and (
CONF_ALLOW_HUE_GROUPS not in entry.options
or config[CONF_ALLOW_HUE_GROUPS] != entry.options[CONF_ALLOW_HUE_GROUPS]
):
options[CONF_ALLOW_HUE_GROUPS] = config[CONF_ALLOW_HUE_GROUPS]

if CONF_ALLOW_UNREACHABLE in config and (
CONF_ALLOW_UNREACHABLE not in entry.options
or config[CONF_ALLOW_UNREACHABLE] != entry.options[CONF_ALLOW_UNREACHABLE]
):
options[CONF_ALLOW_UNREACHABLE] = config[CONF_ALLOW_UNREACHABLE]

if options:
hass.config_entries.async_update_entry(
entry,
options={**entry.options, **options},
)

bridge = HueBridge(hass, entry)

if not await bridge.async_setup():
@@ -215,7 +215,7 @@ class PowerViewShade(ShadeEntity, CoverEntity):
def _async_update_current_cover_position(self):
"""Update the current cover position from the data."""
_LOGGER.debug("Raw data update: %s", self._shade.raw_data)
position_data = self._shade.raw_data[ATTR_POSITION_DATA]
position_data = self._shade.raw_data.get(ATTR_POSITION_DATA, {})
if ATTR_POSITION1 in position_data:
self._current_cover_position = position_data[ATTR_POSITION1]
self._is_opening = False
@@ -49,7 +49,6 @@ from .const import (
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services

DEVICE_PARENT_REQUIRED = [
DEVICE_CLASS_OPENING,
@@ -172,7 +171,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, BINARY_SENSOR, devices)
async_add_entities(devices)
async_setup_device_services(hass)


def _detect_device_type_and_class(node: Union[Group, Node]) -> (str, str):
@@ -52,7 +52,6 @@ from .const import (
)
from .entity import ISYNodeEntity
from .helpers import convert_isy_value_to_hass, migrate_old_unique_ids
from .services import async_setup_device_services

ISY_SUPPORTED_FEATURES = (
SUPPORT_FAN_MODE | SUPPORT_TARGET_TEMPERATURE | SUPPORT_TARGET_TEMPERATURE_RANGE
@@ -73,7 +72,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, CLIMATE, entities)
async_add_entities(entities)
async_setup_device_services(hass)


class ISYThermostatEntity(ISYNodeEntity, ClimateEntity):
@@ -24,7 +24,6 @@ from .const import (
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services


async def async_setup_entry(
@@ -43,7 +42,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, COVER, devices)
async_add_entities(devices)
async_setup_device_services(hass)


class ISYCoverEntity(ISYNodeEntity, CoverEntity):
@@ -18,7 +18,6 @@ from homeassistant.helpers.typing import HomeAssistantType
from .const import _LOGGER, DOMAIN as ISY994_DOMAIN, ISY994_NODES, ISY994_PROGRAMS
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services

VALUE_TO_STATE = {
0: SPEED_OFF,
@@ -51,7 +50,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, FAN, devices)
async_add_entities(devices)
async_setup_device_services(hass)


class ISYFanEntity(ISYNodeEntity, FanEntity):
@@ -20,7 +20,7 @@ from .const import (
)
from .entity import ISYNodeEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services, async_setup_light_services
from .services import async_setup_light_services

ATTR_LAST_BRIGHTNESS = "last_brightness"

@@ -41,7 +41,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, LIGHT, devices)
async_add_entities(devices)
async_setup_device_services(hass)
async_setup_light_services(hass)
@@ -10,7 +10,6 @@ from homeassistant.helpers.typing import HomeAssistantType
from .const import _LOGGER, DOMAIN as ISY994_DOMAIN, ISY994_NODES, ISY994_PROGRAMS
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services

VALUE_TO_STATE = {0: False, 100: True}

@@ -31,7 +30,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, LOCK, devices)
async_add_entities(devices)
async_setup_device_services(hass)


class ISYLockEntity(ISYNodeEntity, LockEntity):
@@ -19,7 +19,6 @@ from .const import (
)
from .entity import ISYEntity, ISYNodeEntity
from .helpers import convert_isy_value_to_hass, migrate_old_unique_ids
from .services import async_setup_device_services


async def async_setup_entry(
@@ -40,7 +39,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, SENSOR, devices)
async_add_entities(devices)
async_setup_device_services(hass)


class ISYSensorEntity(ISYNodeEntity):
@@ -13,9 +13,10 @@ from homeassistant.const import (
CONF_UNIT_OF_MEASUREMENT,
SERVICE_RELOAD,
)
from homeassistant.core import callback
from homeassistant.core import ServiceCall, callback
from homeassistant.helpers import entity_platform
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import async_get_platforms
import homeassistant.helpers.entity_registry as er
from homeassistant.helpers.typing import HomeAssistantType

@@ -353,6 +354,30 @@ def async_setup_services(hass: HomeAssistantType):
domain=DOMAIN, service=SERVICE_RELOAD, service_func=async_reload_config_entries
)

async def _async_send_raw_node_command(call: ServiceCall):
await hass.helpers.service.entity_service_call(
async_get_platforms(hass, DOMAIN), SERVICE_SEND_RAW_NODE_COMMAND, call
)

hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SEND_RAW_NODE_COMMAND,
schema=cv.make_entity_service_schema(SERVICE_SEND_RAW_NODE_COMMAND_SCHEMA),
service_func=_async_send_raw_node_command,
)

async def _async_send_node_command(call: ServiceCall):
await hass.helpers.service.entity_service_call(
async_get_platforms(hass, DOMAIN), SERVICE_SEND_NODE_COMMAND, call
)

hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SEND_NODE_COMMAND,
schema=cv.make_entity_service_schema(SERVICE_SEND_NODE_COMMAND_SCHEMA),
service_func=_async_send_node_command,
)


@callback
def async_unload_services(hass: HomeAssistantType):
@@ -374,23 +399,8 @@ def async_unload_services(hass: HomeAssistantType):
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SET_VARIABLE)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_CLEANUP)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_RELOAD)


@callback
def async_setup_device_services(hass: HomeAssistantType):
"""Create device-specific services for the ISY Integration."""
platform = entity_platform.current_platform.get()

platform.async_register_entity_service(
SERVICE_SEND_RAW_NODE_COMMAND,
SERVICE_SEND_RAW_NODE_COMMAND_SCHEMA,
SERVICE_SEND_RAW_NODE_COMMAND,
)
platform.async_register_entity_service(
SERVICE_SEND_NODE_COMMAND,
SERVICE_SEND_NODE_COMMAND_SCHEMA,
SERVICE_SEND_NODE_COMMAND,
)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND)


@callback
@@ -10,7 +10,6 @@ from homeassistant.helpers.typing import HomeAssistantType
from .const import _LOGGER, DOMAIN as ISY994_DOMAIN, ISY994_NODES, ISY994_PROGRAMS
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services


async def async_setup_entry(
@@ -29,7 +28,6 @@ async def async_setup_entry(

await migrate_old_unique_ids(hass, SWITCH, devices)
async_add_entities(devices)
async_setup_device_services(hass)


class ISYSwitchEntity(ISYNodeEntity, SwitchEntity):
@@ -148,6 +148,12 @@ async def async_setup(hass, config):
discovery.async_load_platform(hass, platform.value, DOMAIN, {}, config)
)

if not hass.data[DATA_KNX].xknx.devices:
_LOGGER.warning(
"No KNX devices are configured. Please read "
"https://www.home-assistant.io/blog/2020/09/17/release-115/#breaking-changes"
)

hass.services.async_register(
DOMAIN,
SERVICE_KNX_SEND,
@@ -1,7 +1,18 @@
"""Support for media browsing."""
import logging

from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player import BrowseError, BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_ALBUM,
MEDIA_CLASS_ARTIST,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_EPISODE,
MEDIA_CLASS_MOVIE,
MEDIA_CLASS_MUSIC,
MEDIA_CLASS_PLAYLIST,
MEDIA_CLASS_SEASON,
MEDIA_CLASS_TRACK,
MEDIA_CLASS_TV_SHOW,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_EPISODE,
@@ -18,13 +29,30 @@ PLAYABLE_MEDIA_TYPES = [
MEDIA_TYPE_TRACK,
]

EXPANDABLE_MEDIA_TYPES = [
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_TVSHOW,
MEDIA_TYPE_SEASON,
]
CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS = {
MEDIA_TYPE_ALBUM: MEDIA_CLASS_ALBUM,
MEDIA_TYPE_ARTIST: MEDIA_CLASS_ARTIST,
MEDIA_TYPE_PLAYLIST: MEDIA_CLASS_PLAYLIST,
MEDIA_TYPE_SEASON: MEDIA_CLASS_SEASON,
MEDIA_TYPE_TVSHOW: MEDIA_CLASS_TV_SHOW,
}

CHILD_TYPE_MEDIA_CLASS = {
MEDIA_TYPE_SEASON: MEDIA_CLASS_SEASON,
MEDIA_TYPE_ALBUM: MEDIA_CLASS_ALBUM,
MEDIA_TYPE_ARTIST: MEDIA_CLASS_ARTIST,
MEDIA_TYPE_MOVIE: MEDIA_CLASS_MOVIE,
MEDIA_TYPE_PLAYLIST: MEDIA_CLASS_PLAYLIST,
MEDIA_TYPE_TRACK: MEDIA_CLASS_TRACK,
MEDIA_TYPE_TVSHOW: MEDIA_CLASS_TV_SHOW,
MEDIA_TYPE_EPISODE: MEDIA_CLASS_EPISODE,
}

_LOGGER = logging.getLogger(__name__)


class UnknownMediaType(BrowseError):
"""Unknown media type."""


async def build_item_response(media_library, payload):
@@ -121,18 +149,35 @@ async def build_item_response(media_library, payload):
title = season["seasondetails"]["label"]

if media is None:
return
return None

return BrowseMedia(
media_content_id=payload["search_id"],
children = []
for item in media:
try:
children.append(item_payload(item, media_library))
except UnknownMediaType:
pass

response = BrowseMedia(
media_class=CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS.get(
search_type, MEDIA_CLASS_DIRECTORY
),
media_content_id=search_id,
media_content_type=search_type,
title=title,
can_play=search_type in PLAYABLE_MEDIA_TYPES and search_id,
can_expand=True,
children=[item_payload(item, media_library) for item in media],
children=children,
thumbnail=thumbnail,
)

if search_type == "library_music":
response.children_media_class = MEDIA_CLASS_MUSIC
else:
response.calculate_children_class()

return response


def item_payload(item, media_library):
"""
@@ -140,43 +185,67 @@ def item_payload(item, media_library):

Used by async_browse_media.
"""
if "songid" in item:
media_content_type = MEDIA_TYPE_TRACK
media_content_id = f"{item['songid']}"
elif "albumid" in item:
media_content_type = MEDIA_TYPE_ALBUM
media_content_id = f"{item['albumid']}"
elif "artistid" in item:
media_content_type = MEDIA_TYPE_ARTIST
media_content_id = f"{item['artistid']}"
elif "movieid" in item:
media_content_type = MEDIA_TYPE_MOVIE
media_content_id = f"{item['movieid']}"
elif "episodeid" in item:
media_content_type = MEDIA_TYPE_EPISODE
media_content_id = f"{item['episodeid']}"
elif "seasonid" in item:
media_content_type = MEDIA_TYPE_SEASON
media_content_id = f"{item['tvshowid']}/{item['season']}"
elif "tvshowid" in item:
media_content_type = MEDIA_TYPE_TVSHOW
media_content_id = f"{item['tvshowid']}"
else:
# this case is for the top folder of each type
# possible content types: album, artist, movie, library_music, tvshow
media_content_type = item.get("type")
media_content_id = ""

title = item["label"]
can_play = media_content_type in PLAYABLE_MEDIA_TYPES and bool(media_content_id)
can_expand = media_content_type in EXPANDABLE_MEDIA_TYPES

thumbnail = item.get("thumbnail")
if thumbnail:
thumbnail = media_library.thumbnail_url(thumbnail)

media_class = None

if "songid" in item:
media_content_type = MEDIA_TYPE_TRACK
media_content_id = f"{item['songid']}"
can_play = True
can_expand = False
elif "albumid" in item:
media_content_type = MEDIA_TYPE_ALBUM
media_content_id = f"{item['albumid']}"
can_play = True
can_expand = True
elif "artistid" in item:
media_content_type = MEDIA_TYPE_ARTIST
media_content_id = f"{item['artistid']}"
can_play = True
can_expand = True
elif "movieid" in item:
media_content_type = MEDIA_TYPE_MOVIE
media_content_id = f"{item['movieid']}"
can_play = True
can_expand = False
elif "episodeid" in item:
media_content_type = MEDIA_TYPE_EPISODE
media_content_id = f"{item['episodeid']}"
can_play = True
can_expand = False
elif "seasonid" in item:
media_content_type = MEDIA_TYPE_SEASON
media_content_id = f"{item['tvshowid']}/{item['season']}"
can_play = False
can_expand = True
elif "tvshowid" in item:
media_content_type = MEDIA_TYPE_TVSHOW
media_content_id = f"{item['tvshowid']}"
can_play = False
can_expand = True
else:
# this case is for the top folder of each type
# possible content types: album, artist, movie, library_music, tvshow
media_class = MEDIA_CLASS_DIRECTORY
media_content_type = item["type"]
media_content_id = ""
can_play = False
can_expand = True

if media_class is None:
try:
media_class = CHILD_TYPE_MEDIA_CLASS[media_content_type]
except KeyError as err:
_LOGGER.debug("Unknown media type received: %s", media_content_type)
raise UnknownMediaType from err

return BrowseMedia(
title=title,
media_class=media_class,
media_content_type=media_content_type,
media_content_id=media_content_id,
can_play=can_play,
@@ -192,6 +261,7 @@ def library_payload(media_library):

Used by async_browse_media.
"""
library_info = BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="library",
media_content_type="library",
title="Media Library",
@@ -85,6 +85,7 @@ from .const import (
ATTR_SOUND_MODE,
ATTR_SOUND_MODE_LIST,
DOMAIN,
MEDIA_CLASS_DIRECTORY,
SERVICE_CLEAR_PLAYLIST,
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOUND_MODE,
@@ -816,23 +817,10 @@ class MediaPlayerEntity(Entity):
media_content_type: Optional[str] = None,
media_content_id: Optional[str] = None,
) -> "BrowseMedia":
"""
Return a payload for the "media_player/browse_media" websocket command.
"""Return a BrowseMedia instance.

Payload should follow this format:
{
"title": str - Title of the item
"media_content_type": str - see below
"media_content_id": str - see below
- Can be passed back in to browse further
- Can be used as-is with media_player.play_media service
"can_play": bool - If item is playable
"can_expand": bool - If item contains other media
"thumbnail": str (Optional) - URL to image thumbnail for item
"children": list (Optional) - [{<item_with_keys_above>}, ...]
}

Note: Children should omit the children key.
The BrowseMedia instance will be used by the
"media_player/browse_media" websocket command.
"""
raise NotImplementedError()

@@ -1046,31 +1034,40 @@ class BrowseMedia:
def __init__(
self,
*,
media_class: str,
media_content_id: str,
media_content_type: str,
title: str,
can_play: bool,
can_expand: bool,
children: Optional[List["BrowseMedia"]] = None,
children_media_class: Optional[str] = None,
thumbnail: Optional[str] = None,
):
"""Initialize browse media item."""
self.media_class = media_class
self.media_content_id = media_content_id
self.media_content_type = media_content_type
self.title = title
self.can_play = can_play
self.can_expand = can_expand
self.children = children
self.children_media_class = children_media_class
self.thumbnail = thumbnail

def as_dict(self, *, parent: bool = True) -> dict:
"""Convert Media class to browse media dictionary."""
if self.children_media_class is None:
self.calculate_children_class()

response = {
"title": self.title,
"media_class": self.media_class,
"media_content_type": self.media_content_type,
"media_content_id": self.media_content_id,
"can_play": self.can_play,
"can_expand": self.can_expand,
"children_media_class": self.children_media_class,
"thumbnail": self.thumbnail,
}

@@ -1085,3 +1082,14 @@ class BrowseMedia:
response["children"] = []

return response

def calculate_children_class(self) -> None:
"""Count the children media classes and calculate the correct class."""
if self.children is None or len(self.children) == 0:
return

self.children_media_class = MEDIA_CLASS_DIRECTORY

proposed_class = self.children[0].media_class
if all(child.media_class == proposed_class for child in self.children):
self.children_media_class = proposed_class
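An illustrative sketch of the keyword-only `BrowseMedia` constructor and the new `children_media_class` derivation shown above. It assumes the `homeassistant` package is importable; the titles and IDs are made up for the example.

```python
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
    MEDIA_CLASS_ALBUM,
    MEDIA_CLASS_TRACK,
    MEDIA_TYPE_ALBUM,
    MEDIA_TYPE_TRACK,
)

album = BrowseMedia(
    media_class=MEDIA_CLASS_ALBUM,
    media_content_id="album-1",
    media_content_type=MEDIA_TYPE_ALBUM,
    title="Example Album",
    can_play=True,
    can_expand=True,
    children=[
        BrowseMedia(
            media_class=MEDIA_CLASS_TRACK,
            media_content_id="track-1",
            media_content_type=MEDIA_TYPE_TRACK,
            title="Example Track",
            can_play=True,
            can_expand=False,
        )
    ],
)

payload = album.as_dict()
# All children share MEDIA_CLASS_TRACK, so calculate_children_class()
# fills children_media_class with "track" instead of "directory".
assert payload["children_media_class"] == MEDIA_CLASS_TRACK
```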
@@ -29,6 +29,27 @@ ATTR_SOUND_MODE_LIST = "sound_mode_list"

DOMAIN = "media_player"

MEDIA_CLASS_ALBUM = "album"
MEDIA_CLASS_APP = "app"
MEDIA_CLASS_ARTIST = "artist"
MEDIA_CLASS_CHANNEL = "channel"
MEDIA_CLASS_COMPOSER = "composer"
MEDIA_CLASS_CONTRIBUTING_ARTIST = "contributing_artist"
MEDIA_CLASS_DIRECTORY = "directory"
MEDIA_CLASS_EPISODE = "episode"
MEDIA_CLASS_GAME = "game"
MEDIA_CLASS_GENRE = "genre"
MEDIA_CLASS_IMAGE = "image"
MEDIA_CLASS_MOVIE = "movie"
MEDIA_CLASS_MUSIC = "music"
MEDIA_CLASS_PLAYLIST = "playlist"
MEDIA_CLASS_PODCAST = "podcast"
MEDIA_CLASS_SEASON = "season"
MEDIA_CLASS_TRACK = "track"
MEDIA_CLASS_TV_SHOW = "tv_show"
MEDIA_CLASS_URL = "url"
MEDIA_CLASS_VIDEO = "video"

MEDIA_TYPE_ALBUM = "album"
MEDIA_TYPE_APP = "app"
MEDIA_TYPE_APPS = "apps"
@@ -1,7 +1,18 @@
"""Constants for the media_source integration."""
import re

from homeassistant.components.media_player.const import (
MEDIA_CLASS_IMAGE,
MEDIA_CLASS_MUSIC,
MEDIA_CLASS_VIDEO,
)

DOMAIN = "media_source"
MEDIA_MIME_TYPES = ("audio", "video", "image")
MEDIA_CLASS_MAP = {
"audio": MEDIA_CLASS_MUSIC,
"video": MEDIA_CLASS_VIDEO,
"image": MEDIA_CLASS_IMAGE,
}
URI_SCHEME = "media-source://"
URI_SCHEME_REGEX = re.compile(r"^media-source://(?P<domain>[^/]+)?(?P<identifier>.+)?")
@@ -6,12 +6,13 @@ from typing import Tuple
from aiohttp import web

from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.components.media_source.error import Unresolvable
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import sanitize_path

from .const import DOMAIN, MEDIA_MIME_TYPES
from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia


@@ -111,10 +112,15 @@ class LocalSource(MediaSource):
if is_dir:
title += "/"

media_class = MEDIA_CLASS_MAP.get(
mime_type and mime_type.split("/")[0], MEDIA_CLASS_DIRECTORY
)

media = BrowseMediaSource(
domain=DOMAIN,
identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.path('media'))}",
media_content_type="directory",
media_class=media_class,
media_content_type=mime_type or "",
title=title,
can_play=is_file,
can_expand=is_dir,
@@ -130,6 +136,9 @@ class LocalSource(MediaSource):
if child:
media.children.append(child)

# Sort children showing directories first, then by name
media.children.sort(key=lambda child: (child.can_play, child.title))

return media
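A standalone sketch of the new sort key: directories (`can_play=False`) come before playable files, then items sort alphabetically by title. `SimpleNamespace` stands in for `BrowseMediaSource` purely for illustration.

```python
from types import SimpleNamespace

children = [
    SimpleNamespace(can_play=True, title="b.mp3"),
    SimpleNamespace(can_play=False, title="albums/"),
    SimpleNamespace(can_play=True, title="a.mp3"),
]

# Same key as in the diff: False sorts before True, so directories lead.
children.sort(key=lambda child: (child.can_play, child.title))
assert [c.title for c in children] == ["albums/", "a.mp3", "b.mp3"]
```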
@@ -5,6 +5,8 @@ from typing import List, Optional, Tuple

from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_CHANNELS,
)
@@ -52,15 +54,18 @@ class MediaSourceItem:
base = BrowseMediaSource(
domain=None,
identifier=None,
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=MEDIA_TYPE_CHANNELS,
title="Media Sources",
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_CHANNEL,
)
base.children = [
BrowseMediaSource(
domain=source.domain,
identifier=None,
media_class=MEDIA_CLASS_CHANNEL,
media_content_type=MEDIA_TYPE_CHANNEL,
title=source.name,
can_play=False,
@@ -104,7 +104,6 @@ class MetWeather(CoordinatorEntity, WeatherEntity):
self._config = config
self._is_metric = is_metric
self._hourly = hourly
self._name_appendix = "-hourly" if hourly else ""

@property
def track_home(self):
@@ -114,23 +113,34 @@ class MetWeather(CoordinatorEntity, WeatherEntity):
@property
def unique_id(self):
"""Return unique ID."""
name_appendix = ""
if self._hourly:
name_appendix = "-hourly"
if self.track_home:
return f"home{self._name_appendix}"
return f"home{name_appendix}"

return f"{self._config[CONF_LATITUDE]}-{self._config[CONF_LONGITUDE]}{self._name_appendix}"
return f"{self._config[CONF_LATITUDE]}-{self._config[CONF_LONGITUDE]}{name_appendix}"

@property
def name(self):
"""Return the name of the sensor."""
name = self._config.get(CONF_NAME)
name_appendix = ""
if self._hourly:
name_appendix = " Hourly"

if name is not None:
return f"{name}{self._name_appendix}"
return f"{name}{name_appendix}"

if self.track_home:
return f"{self.hass.config.location_name}{self._name_appendix}"
return f"{self.hass.config.location_name}{name_appendix}"

return f"{DEFAULT_NAME}{self._name_appendix}"
return f"{DEFAULT_NAME}{name_appendix}"

@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return not self._hourly

@property
def condition(self):
@@ -1,8 +1,7 @@
{
"disabled": "Dependency contains code that breaks Home Assistant.",
"domain": "miflora",
"name": "Mi Flora",
"documentation": "https://www.home-assistant.io/integrations/miflora",
"requirements": ["bluepy==1.3.0", "miflora==0.6.0"],
"codeowners": ["@danielhiversen", "@ChristianKuehnel"]
"requirements": ["bluepy==1.3.0", "miflora==0.7.0"],
"codeowners": ["@danielhiversen", "@ChristianKuehnel", "@basnijholt"]
}
@@ -1305,7 +1305,7 @@ class MqttDiscoveryUpdate(Entity):
debug_info.add_entity_discovery_data(
self.hass, self._discovery_data, self.entity_id
)
# Set in case the entity has been removed and is re-added
# Set in case the entity has been removed and is re-added, for example when changing entity_id
set_discovery_hash(self.hass, discovery_hash)
self._remove_signal = async_dispatcher_connect(
self.hass,
@@ -104,7 +104,7 @@ async def async_setup_platform(
):
"""Set up MQTT alarm control panel through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(config, async_add_entities)
await _async_setup_entity(hass, config, async_add_entities)


async def async_setup_entry(hass, config_entry, async_add_entities):
@@ -116,7 +116,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_data
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
@@ -128,10 +128,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):


async def _async_setup_entity(
config, async_add_entities, config_entry=None, discovery_data=None
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT Alarm Control Panel platform."""
async_add_entities([MqttAlarm(config, config_entry, discovery_data)])
async_add_entities([MqttAlarm(hass, config, config_entry, discovery_data)])


class MqttAlarm(
@@ -143,13 +143,16 @@ class MqttAlarm(
):
"""Representation of a MQTT alarm status."""

def __init__(self, config, config_entry, discovery_data):
def __init__(self, hass, config, config_entry, discovery_data):
"""Init the MQTT Alarm Control Panel."""
self.hass = hass
self._state = None
self._config = config
self._unique_id = config.get(CONF_UNIQUE_ID)
self._sub_state = None

# Load config
self._setup_from_config(config)

device_config = config.get(CONF_DEVICE)

MqttAttributes.__init__(self, config)
@@ -165,26 +168,30 @@ class MqttAlarm(
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._config = config
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()

async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
def _setup_from_config(self, config):
self._config = config
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = self.hass
command_template = self._config[CONF_COMMAND_TEMPLATE]
command_template.hass = self.hass

async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""

@callback
@log_messages(self.hass, self.entity_id)
def message_received(msg):
"""Run when new MQTT message has been received."""
payload = msg.payload
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
payload = value_template.async_render_with_possible_json_value(
msg.payload, self._state
@@ -76,7 +76,7 @@ async def async_setup_platform(
):
"""Set up MQTT binary sensor through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(config, async_add_entities)
await _async_setup_entity(hass, config, async_add_entities)


async def async_setup_entry(hass, config_entry, async_add_entities):
@@ -88,7 +88,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_data
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
@@ -100,10 +100,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):


async def _async_setup_entity(
config, async_add_entities, config_entry=None, discovery_data=None
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT binary sensor."""
async_add_entities([MqttBinarySensor(config, config_entry, discovery_data)])
async_add_entities([MqttBinarySensor(hass, config, config_entry, discovery_data)])


class MqttBinarySensor(
@@ -115,9 +115,9 @@ class MqttBinarySensor(
):
"""Representation a binary sensor that is updated by MQTT."""

def __init__(self, config, config_entry, discovery_data):
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the MQTT binary sensor."""
self._config = config
self.hass = hass
self._unique_id = config.get(CONF_UNIQUE_ID)
self._state = None
self._sub_state = None
@@ -128,6 +128,10 @@ class MqttBinarySensor(
self._expired = True
else:
self._expired = None

# Load config
self._setup_from_config(config)

device_config = config.get(CONF_DEVICE)

MqttAttributes.__init__(self, config)
@@ -143,19 +147,22 @@ class MqttBinarySensor(
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._config = config
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()

async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
def _setup_from_config(self, config):
self._config = config
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = self.hass

async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""

@callback
def off_delay_listener(now):
"""Switch device off after a delay."""
@@ -174,7 +174,7 @@ async def async_setup_platform(
|
||||
):
|
||||
"""Set up MQTT cover through configuration.yaml."""
|
||||
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
|
||||
await _async_setup_entity(config, async_add_entities)
|
||||
await _async_setup_entity(hass, config, async_add_entities)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
@@ -186,7 +186,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
try:
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
await _async_setup_entity(
|
||||
config, async_add_entities, config_entry, discovery_data
|
||||
hass, config, async_add_entities, config_entry, discovery_data
|
||||
)
|
||||
except Exception:
|
||||
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
|
||||
@@ -198,10 +198,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
|
||||
|
||||
async def _async_setup_entity(
|
||||
config, async_add_entities, config_entry=None, discovery_data=None
|
||||
hass, config, async_add_entities, config_entry=None, discovery_data=None
|
||||
):
|
||||
"""Set up the MQTT Cover."""
|
||||
async_add_entities([MqttCover(config, config_entry, discovery_data)])
|
||||
async_add_entities([MqttCover(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
class MqttCover(
|
||||
@@ -213,8 +213,9 @@ class MqttCover(
|
||||
):
|
||||
"""Representation of a cover that can be controlled using MQTT."""
|
||||
|
||||
def __init__(self, config, config_entry, discovery_data):
|
||||
def __init__(self, hass, config, config_entry, discovery_data):
|
||||
"""Initialize the cover."""
|
||||
self.hass = hass
|
||||
self._unique_id = config.get(CONF_UNIQUE_ID)
|
||||
self._position = None
|
||||
self._state = None
|
||||
@@ -257,8 +258,6 @@ class MqttCover(
|
||||
)
|
||||
self._tilt_optimistic = config[CONF_TILT_STATE_OPTIMISTIC]
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
template.hass = self.hass
|
||||
@@ -269,6 +268,8 @@ class MqttCover(
|
||||
if tilt_status_template is not None:
|
||||
tilt_status_template.hass = self.hass
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
topics = {}
|
||||
|
||||
@callback
|
||||
@@ -276,6 +277,7 @@ class MqttCover(
|
||||
def tilt_message_received(msg):
|
||||
"""Handle tilt updates."""
|
||||
payload = msg.payload
|
||||
tilt_status_template = self._config.get(CONF_TILT_STATUS_TEMPLATE)
|
||||
if tilt_status_template is not None:
|
||||
payload = tilt_status_template.async_render_with_possible_json_value(
|
||||
payload
|
||||
@@ -296,6 +298,7 @@ class MqttCover(
|
||||
def state_message_received(msg):
|
||||
"""Handle new MQTT state messages."""
|
||||
payload = msg.payload
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
payload = template.async_render_with_possible_json_value(payload)
|
||||
|
||||
@@ -321,6 +324,7 @@ class MqttCover(
|
||||
def position_message_received(msg):
|
||||
"""Handle new MQTT state messages."""
|
||||
payload = msg.payload
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
payload = template.async_render_with_possible_json_value(payload)
|
||||
|
||||
|
||||
@@ -115,7 +115,7 @@ async def async_setup_platform(
|
||||
):
|
||||
"""Set up MQTT fan through configuration.yaml."""
|
||||
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
|
||||
await _async_setup_entity(config, async_add_entities)
|
||||
await _async_setup_entity(hass, config, async_add_entities)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
@@ -127,7 +127,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
try:
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
await _async_setup_entity(
|
||||
config, async_add_entities, config_entry, discovery_data
|
||||
hass, config, async_add_entities, config_entry, discovery_data
|
||||
)
|
||||
except Exception:
|
||||
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
|
||||
@@ -139,10 +139,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
|
||||
|
||||
async def _async_setup_entity(
|
||||
config, async_add_entities, config_entry=None, discovery_data=None
|
||||
hass, config, async_add_entities, config_entry=None, discovery_data=None
|
||||
):
|
||||
"""Set up the MQTT fan."""
|
||||
async_add_entities([MqttFan(config, config_entry, discovery_data)])
|
||||
async_add_entities([MqttFan(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
class MqttFan(
|
||||
@@ -154,8 +154,9 @@ class MqttFan(
|
||||
):
|
||||
"""A MQTT fan component."""
|
||||
|
||||
def __init__(self, config, config_entry, discovery_data):
|
||||
def __init__(self, hass, config, config_entry, discovery_data):
|
||||
"""Initialize the MQTT fan."""
|
||||
self.hass = hass
|
||||
self._unique_id = config.get(CONF_UNIQUE_ID)
|
||||
self._state = False
|
||||
self._speed = None
|
||||
@@ -242,22 +243,22 @@ class MqttFan(
|
||||
self._topic[CONF_SPEED_COMMAND_TOPIC] is not None and SUPPORT_SET_SPEED
|
||||
)
|
||||
|
||||
for key, tpl in list(self._templates.items()):
|
||||
if tpl is None:
|
||||
self._templates[key] = lambda value: value
|
||||
else:
|
||||
tpl.hass = self.hass
|
||||
self._templates[key] = tpl.async_render_with_possible_json_value
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
topics = {}
|
||||
templates = {}
|
||||
for key, tpl in list(self._templates.items()):
|
||||
if tpl is None:
|
||||
templates[key] = lambda value: value
|
||||
else:
|
||||
tpl.hass = self.hass
|
||||
templates[key] = tpl.async_render_with_possible_json_value
|
||||
|
||||
@callback
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def state_received(msg):
|
||||
"""Handle new received MQTT message."""
|
||||
payload = templates[CONF_STATE](msg.payload)
|
||||
payload = self._templates[CONF_STATE](msg.payload)
|
||||
if payload == self._payload["STATE_ON"]:
|
||||
self._state = True
|
||||
elif payload == self._payload["STATE_OFF"]:
|
||||
@@ -275,7 +276,7 @@ class MqttFan(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def speed_received(msg):
|
||||
"""Handle new received MQTT message for the speed."""
|
||||
payload = templates[ATTR_SPEED](msg.payload)
|
||||
payload = self._templates[ATTR_SPEED](msg.payload)
|
||||
if payload == self._payload["SPEED_LOW"]:
|
||||
self._speed = SPEED_LOW
|
||||
elif payload == self._payload["SPEED_MEDIUM"]:
|
||||
@@ -298,7 +299,7 @@ class MqttFan(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def oscillation_received(msg):
|
||||
"""Handle new received MQTT message for the oscillation."""
|
||||
payload = templates[OSCILLATION](msg.payload)
|
||||
payload = self._templates[OSCILLATION](msg.payload)
|
||||
if payload == self._payload["OSCILLATE_ON_PAYLOAD"]:
|
||||
self._oscillation = True
|
||||
elif payload == self._payload["OSCILLATE_OFF_PAYLOAD"]:
|
||||
|
||||
@@ -35,6 +35,7 @@ from homeassistant.const import (
|
||||
CONF_PAYLOAD_OFF,
|
||||
CONF_PAYLOAD_ON,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
STATE_ON,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
@@ -157,6 +158,9 @@ async def async_setup_entity_basic(
|
||||
hass, config, async_add_entities, config_entry, discovery_data=None
|
||||
):
|
||||
"""Set up a MQTT Light."""
|
||||
if CONF_STATE_VALUE_TEMPLATE not in config and CONF_VALUE_TEMPLATE in config:
|
||||
config[CONF_STATE_VALUE_TEMPLATE] = config[CONF_VALUE_TEMPLATE]
|
||||
|
||||
async_add_entities([MqttLight(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
@@ -250,7 +254,7 @@ class MqttLight(
|
||||
|
||||
value_templates = {}
|
||||
for key in VALUE_TEMPLATE_KEYS:
|
||||
value_templates[key] = lambda value: value
|
||||
value_templates[key] = lambda value, _: value
|
||||
for key in VALUE_TEMPLATE_KEYS & config.keys():
|
||||
tpl = config[key]
|
||||
value_templates[key] = tpl.async_render_with_possible_json_value
|
||||
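Note (not part of the diff): the identity fallback gains a second parameter so every entry in value_templates can be invoked the same way as Template.async_render_with_possible_json_value(payload, default). A minimal sketch of the assumed calling convention:

    # identity fallback: accept and ignore the default that templated entries take
    value_templates[key] = lambda value, _: value

    # fallback and real templates are now called uniformly, as in the hunks below
    payload = self._value_templates[CONF_STATE_VALUE_TEMPLATE](msg.payload, None)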
@@ -300,7 +304,9 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def state_received(msg):
|
||||
"""Handle new MQTT messages."""
|
||||
payload = self._value_templates[CONF_STATE_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_STATE_VALUE_TEMPLATE](
|
||||
msg.payload, None
|
||||
)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty state message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -324,7 +330,9 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def brightness_received(msg):
|
||||
"""Handle new MQTT messages for the brightness."""
|
||||
payload = self._value_templates[CONF_BRIGHTNESS_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_BRIGHTNESS_VALUE_TEMPLATE](
|
||||
msg.payload, None
|
||||
)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty brightness message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -356,7 +364,7 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def rgb_received(msg):
|
||||
"""Handle new MQTT messages for RGB."""
|
||||
payload = self._value_templates[CONF_RGB_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_RGB_VALUE_TEMPLATE](msg.payload, None)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty rgb message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -388,7 +396,9 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def color_temp_received(msg):
|
||||
"""Handle new MQTT messages for color temperature."""
|
||||
payload = self._value_templates[CONF_COLOR_TEMP_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_COLOR_TEMP_VALUE_TEMPLATE](
|
||||
msg.payload, None
|
||||
)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty color temp message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -418,7 +428,9 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def effect_received(msg):
|
||||
"""Handle new MQTT messages for effect."""
|
||||
payload = self._value_templates[CONF_EFFECT_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_EFFECT_VALUE_TEMPLATE](
|
||||
msg.payload, None
|
||||
)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty effect message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -448,7 +460,7 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def hs_received(msg):
|
||||
"""Handle new MQTT messages for hs color."""
|
||||
payload = self._value_templates[CONF_HS_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_HS_VALUE_TEMPLATE](msg.payload, None)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty hs message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -480,7 +492,9 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def white_value_received(msg):
|
||||
"""Handle new MQTT messages for white value."""
|
||||
payload = self._value_templates[CONF_WHITE_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_WHITE_VALUE_TEMPLATE](
|
||||
msg.payload, None
|
||||
)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty white value message from '%s'", msg.topic)
|
||||
return
|
||||
@@ -512,7 +526,7 @@ class MqttLight(
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def xy_received(msg):
|
||||
"""Handle new MQTT messages for xy color."""
|
||||
payload = self._value_templates[CONF_XY_VALUE_TEMPLATE](msg.payload)
|
||||
payload = self._value_templates[CONF_XY_VALUE_TEMPLATE](msg.payload, None)
|
||||
if not payload:
|
||||
_LOGGER.debug("Ignoring empty xy-color message from '%s'", msg.topic)
|
||||
return
|
||||
|
||||
@@ -77,7 +77,7 @@ async def async_setup_platform(
|
||||
):
|
||||
"""Set up MQTT lock panel through configuration.yaml."""
|
||||
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
|
||||
await _async_setup_entity(config, async_add_entities)
|
||||
await _async_setup_entity(hass, config, async_add_entities)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
@@ -89,7 +89,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
try:
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
await _async_setup_entity(
|
||||
config, async_add_entities, config_entry, discovery_data
|
||||
hass, config, async_add_entities, config_entry, discovery_data
|
||||
)
|
||||
except Exception:
|
||||
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
|
||||
@@ -101,10 +101,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
|
||||
|
||||
async def _async_setup_entity(
|
||||
config, async_add_entities, config_entry=None, discovery_data=None
|
||||
hass, config, async_add_entities, config_entry=None, discovery_data=None
|
||||
):
|
||||
"""Set up the MQTT Lock platform."""
|
||||
async_add_entities([MqttLock(config, config_entry, discovery_data)])
|
||||
async_add_entities([MqttLock(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
class MqttLock(
|
||||
@@ -116,8 +116,9 @@ class MqttLock(
|
||||
):
|
||||
"""Representation of a lock that can be toggled using MQTT."""
|
||||
|
||||
def __init__(self, config, config_entry, discovery_data):
|
||||
def __init__(self, hass, config, config_entry, discovery_data):
|
||||
"""Initialize the lock."""
|
||||
self.hass = hass
|
||||
self._unique_id = config.get(CONF_UNIQUE_ID)
|
||||
self._state = False
|
||||
self._sub_state = None
|
||||
@@ -154,17 +155,19 @@ class MqttLock(
|
||||
|
||||
self._optimistic = config[CONF_OPTIMISTIC]
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
value_template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if value_template is not None:
|
||||
value_template.hass = self.hass
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
|
||||
@callback
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def message_received(msg):
|
||||
"""Handle new MQTT messages."""
|
||||
payload = msg.payload
|
||||
value_template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if value_template is not None:
|
||||
payload = value_template.async_render_with_possible_json_value(payload)
|
||||
if payload == self._config[CONF_STATE_LOCKED]:
|
||||
|
||||
@@ -70,7 +70,7 @@ async def async_setup_platform(
|
||||
):
|
||||
"""Set up MQTT sensors through configuration.yaml."""
|
||||
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
|
||||
await _async_setup_entity(config, async_add_entities)
|
||||
await _async_setup_entity(hass, config, async_add_entities)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
@@ -82,7 +82,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
try:
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
await _async_setup_entity(
|
||||
config, async_add_entities, config_entry, discovery_data
|
||||
hass, config, async_add_entities, config_entry, discovery_data
|
||||
)
|
||||
except Exception:
|
||||
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
|
||||
@@ -94,10 +94,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
|
||||
|
||||
async def _async_setup_entity(
|
||||
config: ConfigType, async_add_entities, config_entry=None, discovery_data=None
|
||||
hass, config: ConfigType, async_add_entities, config_entry=None, discovery_data=None
|
||||
):
|
||||
"""Set up MQTT sensor."""
|
||||
async_add_entities([MqttSensor(config, config_entry, discovery_data)])
|
||||
async_add_entities([MqttSensor(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
class MqttSensor(
|
||||
@@ -105,9 +105,9 @@ class MqttSensor(
|
||||
):
|
||||
"""Representation of a sensor that can be updated using MQTT."""
|
||||
|
||||
def __init__(self, config, config_entry, discovery_data):
|
||||
def __init__(self, hass, config, config_entry, discovery_data):
|
||||
"""Initialize the sensor."""
|
||||
self._config = config
|
||||
self.hass = hass
|
||||
self._unique_id = config.get(CONF_UNIQUE_ID)
|
||||
self._state = None
|
||||
self._sub_state = None
|
||||
@@ -118,6 +118,10 @@ class MqttSensor(
|
||||
self._expired = True
|
||||
else:
|
||||
self._expired = None
|
||||
|
||||
# Load config
|
||||
self._setup_from_config(config)
|
||||
|
||||
device_config = config.get(CONF_DEVICE)
|
||||
|
||||
MqttAttributes.__init__(self, config)
|
||||
@@ -133,19 +137,23 @@ class MqttSensor(
|
||||
async def discovery_update(self, discovery_payload):
|
||||
"""Handle updated discovery message."""
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
self._config = config
|
||||
self._setup_from_config(config)
|
||||
await self.attributes_discovery_update(config)
|
||||
await self.availability_discovery_update(config)
|
||||
await self.device_info_discovery_update(config)
|
||||
await self._subscribe_topics()
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
def _setup_from_config(self, config):
|
||||
"""(Re)Setup the entity."""
|
||||
self._config = config
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
template.hass = self.hass
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
|
||||
@callback
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def message_received(msg):
|
||||
@@ -169,6 +177,7 @@ class MqttSensor(
|
||||
self.hass, self._value_is_expired, expiration_at
|
||||
)
|
||||
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
payload = template.async_render_with_possible_json_value(
|
||||
payload, self._state
|
||||
|
||||
@@ -73,7 +73,7 @@ async def async_setup_platform(
|
||||
):
|
||||
"""Set up MQTT switch through configuration.yaml."""
|
||||
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
|
||||
await _async_setup_entity(config, async_add_entities, discovery_info)
|
||||
await _async_setup_entity(hass, config, async_add_entities, discovery_info)
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
@@ -85,7 +85,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
try:
|
||||
config = PLATFORM_SCHEMA(discovery_payload)
|
||||
await _async_setup_entity(
|
||||
config, async_add_entities, config_entry, discovery_data
|
||||
hass, config, async_add_entities, config_entry, discovery_data
|
||||
)
|
||||
except Exception:
|
||||
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
|
||||
@@ -97,10 +97,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
|
||||
|
||||
async def _async_setup_entity(
|
||||
config, async_add_entities, config_entry=None, discovery_data=None
|
||||
hass, config, async_add_entities, config_entry=None, discovery_data=None
|
||||
):
|
||||
"""Set up the MQTT switch."""
|
||||
async_add_entities([MqttSwitch(config, config_entry, discovery_data)])
|
||||
async_add_entities([MqttSwitch(hass, config, config_entry, discovery_data)])
|
||||
|
||||
|
||||
class MqttSwitch(
|
||||
@@ -113,8 +113,9 @@ class MqttSwitch(
|
||||
):
|
||||
"""Representation of a switch that can be toggled using MQTT."""
|
||||
|
||||
def __init__(self, config, config_entry, discovery_data):
|
||||
def __init__(self, hass, config, config_entry, discovery_data):
|
||||
"""Initialize the MQTT switch."""
|
||||
self.hass = hass
|
||||
self._state = False
|
||||
self._sub_state = None
|
||||
|
||||
@@ -160,17 +161,19 @@ class MqttSwitch(
|
||||
|
||||
self._optimistic = config[CONF_OPTIMISTIC]
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
template.hass = self.hass
|
||||
|
||||
async def _subscribe_topics(self):
|
||||
"""(Re)Subscribe to topics."""
|
||||
|
||||
@callback
|
||||
@log_messages(self.hass, self.entity_id)
|
||||
def state_message_received(msg):
|
||||
"""Handle new MQTT state messages."""
|
||||
payload = msg.payload
|
||||
template = self._config.get(CONF_VALUE_TEMPLATE)
|
||||
if template is not None:
|
||||
payload = template.async_render_with_possible_json_value(payload)
|
||||
if payload == self._state_on:
|
||||
|
||||
@@ -4,7 +4,11 @@ import logging
|
||||
import re
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from homeassistant.components.media_player.const import MEDIA_TYPE_VIDEO
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_CLASS_DIRECTORY,
|
||||
MEDIA_CLASS_VIDEO,
|
||||
MEDIA_TYPE_VIDEO,
|
||||
)
|
||||
from homeassistant.components.media_player.errors import BrowseError
|
||||
from homeassistant.components.media_source.const import MEDIA_MIME_TYPES
|
||||
from homeassistant.components.media_source.error import MediaSourceError, Unresolvable
|
||||
@@ -88,9 +92,12 @@ class NetatmoSource(MediaSource):
|
||||
else:
|
||||
path = f"{source}/{camera_id}"
|
||||
|
||||
media_class = MEDIA_CLASS_DIRECTORY if event_id is None else MEDIA_CLASS_VIDEO
|
||||
|
||||
media = BrowseMediaSource(
|
||||
domain=DOMAIN,
|
||||
identifier=path,
|
||||
media_class=media_class,
|
||||
media_content_type=MEDIA_TYPE_VIDEO,
|
||||
title=title,
|
||||
can_play=bool(
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
"abort": {
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]"
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
@@ -39,4 +40,4 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ async def async_setup_entry(
            )
        )

    async_add_entities(sensors, True)
    async_add_entities(sensors)


class NZBGetSensor(NZBGetEntity, Entity):
@@ -108,7 +108,7 @@ class NZBGetSensor(NZBGetEntity, Entity):
    @property
    def state(self):
        """Return the state of the sensor."""
        value = self.coordinator.data.status.get(self._sensor_type)
        value = self.coordinator.data["status"].get(self._sensor_type)

        if value is None:
            _LOGGER.warning("Unable to locate value for %s", self._sensor_type)

@@ -1,6 +1,5 @@
|
||||
"""Register a custom front end panel."""
|
||||
import logging
|
||||
import os
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -15,7 +14,6 @@ CONF_SIDEBAR_TITLE = "sidebar_title"
|
||||
CONF_SIDEBAR_ICON = "sidebar_icon"
|
||||
CONF_URL_PATH = "url_path"
|
||||
CONF_CONFIG = "config"
|
||||
CONF_WEBCOMPONENT_PATH = "webcomponent_path"
|
||||
CONF_JS_URL = "js_url"
|
||||
CONF_MODULE_URL = "module_url"
|
||||
CONF_EMBED_IFRAME = "embed_iframe"
|
||||
@@ -32,55 +30,34 @@ LEGACY_URL = "/api/panel_custom/{}"
|
||||
PANEL_DIR = "panels"
|
||||
|
||||
|
||||
def url_validator(value):
|
||||
"""Validate required urls are specified."""
|
||||
has_js_url = CONF_JS_URL in value
|
||||
has_html_url = CONF_WEBCOMPONENT_PATH in value
|
||||
has_module_url = CONF_MODULE_URL in value
|
||||
|
||||
if has_html_url and (has_js_url or has_module_url):
|
||||
raise vol.Invalid("You cannot specify other urls besides a webcomponent path")
|
||||
|
||||
return value
|
||||
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.All(
|
||||
cv.ensure_list,
|
||||
[
|
||||
vol.All(
|
||||
cv.deprecated(CONF_WEBCOMPONENT_PATH, invalidation_version="0.115"),
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_COMPONENT_NAME): cv.string,
|
||||
vol.Optional(CONF_SIDEBAR_TITLE): cv.string,
|
||||
vol.Optional(
|
||||
CONF_SIDEBAR_ICON, default=DEFAULT_ICON
|
||||
): cv.icon,
|
||||
vol.Optional(CONF_URL_PATH): cv.string,
|
||||
vol.Optional(CONF_CONFIG): dict,
|
||||
vol.Optional(
|
||||
CONF_WEBCOMPONENT_PATH,
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_JS_URL,
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_MODULE_URL,
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME
|
||||
): cv.boolean,
|
||||
vol.Optional(
|
||||
CONF_TRUST_EXTERNAL_SCRIPT,
|
||||
default=DEFAULT_TRUST_EXTERNAL,
|
||||
): cv.boolean,
|
||||
vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,
|
||||
}
|
||||
),
|
||||
url_validator,
|
||||
)
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_COMPONENT_NAME): cv.string,
|
||||
vol.Optional(CONF_SIDEBAR_TITLE): cv.string,
|
||||
vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon,
|
||||
vol.Optional(CONF_URL_PATH): cv.string,
|
||||
vol.Optional(CONF_CONFIG): dict,
|
||||
vol.Optional(
|
||||
CONF_JS_URL,
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_MODULE_URL,
|
||||
): cv.string,
|
||||
vol.Optional(
|
||||
CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME
|
||||
): cv.boolean,
|
||||
vol.Optional(
|
||||
CONF_TRUST_EXTERNAL_SCRIPT,
|
||||
default=DEFAULT_TRUST_EXTERNAL,
|
||||
): cv.boolean,
|
||||
vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,
|
||||
}
|
||||
),
|
||||
],
|
||||
)
|
||||
},
|
||||
@@ -98,8 +75,6 @@ async def async_register_panel(
|
||||
# Title/icon for sidebar
|
||||
sidebar_title=None,
|
||||
sidebar_icon=None,
|
||||
# HTML source of your panel
|
||||
html_url=None,
|
||||
# JS source of your panel
|
||||
js_url=None,
|
||||
# JS module of your panel
|
||||
@@ -114,16 +89,11 @@ async def async_register_panel(
|
||||
require_admin=False,
|
||||
):
|
||||
"""Register a new custom panel."""
|
||||
if js_url is None and html_url is None and module_url is None:
|
||||
if js_url is None and module_url is None:
|
||||
raise ValueError("Either js_url, module_url or html_url is required.")
|
||||
if html_url and (js_url or module_url):
|
||||
raise ValueError("You cannot specify other paths with an HTML url")
|
||||
if config is not None and not isinstance(config, dict):
|
||||
raise ValueError("Config needs to be a dictionary.")
|
||||
|
||||
if html_url:
|
||||
_LOGGER.warning("HTML custom panels have been deprecated")
|
||||
|
||||
custom_panel_config = {
|
||||
"name": webcomponent_name,
|
||||
"embed_iframe": embed_iframe,
|
||||
@@ -136,9 +106,6 @@ async def async_register_panel(
|
||||
if module_url is not None:
|
||||
custom_panel_config["module_url"] = module_url
|
||||
|
||||
if html_url is not None:
|
||||
custom_panel_config["html_url"] = html_url
|
||||
|
||||
if config is not None:
|
||||
# Make copy because we're mutating it
|
||||
config = dict(config)
|
||||
@@ -162,8 +129,6 @@ async def async_setup(hass, config):
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
seen = set()
|
||||
|
||||
for panel in config[DOMAIN]:
|
||||
name = panel[CONF_COMPONENT_NAME]
|
||||
|
||||
@@ -184,29 +149,6 @@ async def async_setup(hass, config):
|
||||
if CONF_MODULE_URL in panel:
|
||||
kwargs["module_url"] = panel[CONF_MODULE_URL]
|
||||
|
||||
if CONF_MODULE_URL not in panel and CONF_JS_URL not in panel:
|
||||
if name in seen:
|
||||
_LOGGER.warning(
|
||||
"Got HTML panel with duplicate name %s. Not registering", name
|
||||
)
|
||||
continue
|
||||
|
||||
seen.add(name)
|
||||
panel_path = panel.get(CONF_WEBCOMPONENT_PATH)
|
||||
|
||||
if panel_path is None:
|
||||
panel_path = hass.config.path(PANEL_DIR, f"{name}.html")
|
||||
|
||||
if not await hass.async_add_executor_job(os.path.isfile, panel_path):
|
||||
_LOGGER.error(
|
||||
"Unable to find webcomponent for %s: %s", name, panel_path
|
||||
)
|
||||
continue
|
||||
|
||||
url = LEGACY_URL.format(name)
|
||||
hass.http.register_static_path(url, panel_path)
|
||||
kwargs["html_url"] = url
|
||||
|
||||
try:
|
||||
await async_register_panel(hass, **kwargs)
|
||||
except ValueError as err:
|
||||
|
||||
@@ -11,6 +11,8 @@ from homeassistant.components.media_player import (
|
||||
MediaPlayerEntity,
|
||||
)
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_CLASS_CHANNEL,
|
||||
MEDIA_CLASS_DIRECTORY,
|
||||
MEDIA_TYPE_CHANNEL,
|
||||
MEDIA_TYPE_CHANNELS,
|
||||
SUPPORT_BROWSE_MEDIA,
|
||||
@@ -288,6 +290,7 @@ class PhilipsTVMediaPlayer(MediaPlayerEntity):
|
||||
|
||||
return BrowseMedia(
|
||||
title="Channels",
|
||||
media_class=MEDIA_CLASS_DIRECTORY,
|
||||
media_content_id="",
|
||||
media_content_type=MEDIA_TYPE_CHANNELS,
|
||||
can_play=False,
|
||||
@@ -295,6 +298,7 @@ class PhilipsTVMediaPlayer(MediaPlayerEntity):
|
||||
children=[
|
||||
BrowseMedia(
|
||||
title=channel,
|
||||
media_class=MEDIA_CLASS_CHANNEL,
|
||||
media_content_id=channel,
|
||||
media_content_type=MEDIA_TYPE_CHANNEL,
|
||||
can_play=True,
|
||||
|
||||
@@ -1,4 +1,28 @@
"""The ping component."""

from homeassistant.core import callback

DOMAIN = "ping"
PLATFORMS = ["binary_sensor"]

PING_ID = "ping_id"
DEFAULT_START_ID = 129
MAX_PING_ID = 65534


@callback
def async_get_next_ping_id(hass):
    """Find the next id to use in the outbound ping.

    Must be called in async
    """
    current_id = hass.data.setdefault(DOMAIN, {}).get(PING_ID, DEFAULT_START_ID)

    if current_id == MAX_PING_ID:
        next_id = DEFAULT_START_ID
    else:
        next_id = current_id + 1

    hass.data[DOMAIN][PING_ID] = next_id

    return next_id

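Note (not part of the diff): a minimal usage sketch of the new id allocator, assuming a running hass instance. Ids start just above DEFAULT_START_ID and wrap back to it after MAX_PING_ID, so concurrent ping entities on one instance do not reuse the same ICMP id.

    # must be called from the event loop (it is an @callback)
    first = async_get_next_ping_id(hass)   # 130 on a fresh instance
    second = async_get_next_ping_id(hass)  # 131
    # after a call that returned MAX_PING_ID (65534), the next call yields 129 again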
@@ -1,6 +1,7 @@
"""Tracks the latency of a host by sending ICMP echo requests (ping)."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
import re
import sys
@@ -14,7 +15,7 @@ from homeassistant.const import CONF_HOST, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.reload import setup_reload_service

from . import DOMAIN, PLATFORMS
from . import DOMAIN, PLATFORMS, async_get_next_ping_id
from .const import PING_TIMEOUT

_LOGGER = logging.getLogger(__name__)
@@ -131,20 +132,28 @@ class PingData:
class PingDataICMPLib(PingData):
    """The Class for handling the data retrieval using icmplib."""

    def ping(self):
        """Send ICMP echo request and return details."""
        return icmp_ping(self._ip_address, count=self._count)

    async def async_update(self) -> None:
        """Retrieve the latest details from the host."""
        data = await self.hass.async_add_executor_job(self.ping)
        _LOGGER.debug("ping address: %s", self._ip_address)
        data = await self.hass.async_add_executor_job(
            partial(
                icmp_ping,
                self._ip_address,
                count=self._count,
                id=async_get_next_ping_id(self.hass),
            )
        )
        self.available = data.is_alive
        if not self.available:
            self.data = False
            return

        self.data = {
            "min": data.min_rtt,
            "max": data.max_rtt,
            "avg": data.avg_rtt,
            "mdev": "",
        }
        self.available = data.is_alive


class PingDataSubProcess(PingData):

@@ -15,8 +15,10 @@ from homeassistant.components.device_tracker.const import (
    SOURCE_TYPE_ROUTER,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.process import kill_subprocess

from . import async_get_next_ping_id
from .const import PING_ATTEMPTS_COUNT, PING_TIMEOUT

_LOGGER = logging.getLogger(__name__)
@@ -76,15 +78,22 @@ class HostSubProcess:
class HostICMPLib:
    """Host object with ping detection."""

    def __init__(self, ip_address, dev_id, _, config):
    def __init__(self, ip_address, dev_id, hass, config):
        """Initialize the Host pinger."""
        self.hass = hass
        self.ip_address = ip_address
        self.dev_id = dev_id
        self._count = config[CONF_PING_COUNT]

    def ping(self):
        """Send an ICMP echo request and return True if success."""
        return icmp_ping(self.ip_address, count=PING_ATTEMPTS_COUNT).is_alive
        next_id = run_callback_threadsafe(
            self.hass.loop, async_get_next_ping_id, self.hass
        ).result()

        return icmp_ping(
            self.ip_address, count=PING_ATTEMPTS_COUNT, id=next_id
        ).is_alive

    def update(self, see):
        """Update device state by sending one or more ping messages."""

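Note (not part of the diff): the device_tracker ping() runs in a worker thread, so it cannot call the @callback helper directly; run_callback_threadsafe schedules it on the event loop and blocks on the result. A hedged sketch of the same pattern in isolation:

    def _next_id_from_worker(hass):
        """Fetch the next ICMP id from a non-async context."""
        return run_callback_threadsafe(
            hass.loop, async_get_next_ping_id, hass
        ).result()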
@@ -3,6 +3,6 @@
  "name": "Ping (ICMP)",
  "documentation": "https://www.home-assistant.io/integrations/ping",
  "codeowners": [],
  "requirements": ["icmplib==1.1.1"],
  "requirements": ["icmplib==1.1.3"],
  "quality_scale": "internal"
}

@@ -2,13 +2,31 @@
|
||||
import logging
|
||||
|
||||
from homeassistant.components.media_player import BrowseMedia
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_CLASS_ALBUM,
|
||||
MEDIA_CLASS_ARTIST,
|
||||
MEDIA_CLASS_DIRECTORY,
|
||||
MEDIA_CLASS_EPISODE,
|
||||
MEDIA_CLASS_MOVIE,
|
||||
MEDIA_CLASS_PLAYLIST,
|
||||
MEDIA_CLASS_SEASON,
|
||||
MEDIA_CLASS_TRACK,
|
||||
MEDIA_CLASS_TV_SHOW,
|
||||
MEDIA_CLASS_VIDEO,
|
||||
)
|
||||
from homeassistant.components.media_player.errors import BrowseError
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class UnknownMediaType(BrowseError):
|
||||
"""Unknown media type."""
|
||||
|
||||
|
||||
EXPANDABLES = ["album", "artist", "playlist", "season", "show"]
|
||||
PLAYLISTS_BROWSE_PAYLOAD = {
|
||||
"title": "Playlists",
|
||||
"media_class": MEDIA_CLASS_DIRECTORY,
|
||||
"media_content_id": "all",
|
||||
"media_content_type": "playlists",
|
||||
"can_play": False,
|
||||
@@ -19,6 +37,18 @@ SPECIAL_METHODS = {
|
||||
"Recently Added": "recentlyAdded",
|
||||
}
|
||||
|
||||
ITEM_TYPE_MEDIA_CLASS = {
|
||||
"album": MEDIA_CLASS_ALBUM,
|
||||
"artist": MEDIA_CLASS_ARTIST,
|
||||
"episode": MEDIA_CLASS_EPISODE,
|
||||
"movie": MEDIA_CLASS_MOVIE,
|
||||
"playlist": MEDIA_CLASS_PLAYLIST,
|
||||
"season": MEDIA_CLASS_SEASON,
|
||||
"show": MEDIA_CLASS_TV_SHOW,
|
||||
"track": MEDIA_CLASS_TRACK,
|
||||
"video": MEDIA_CLASS_VIDEO,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -34,11 +64,17 @@ def browse_media(
|
||||
if media is None:
|
||||
return None
|
||||
|
||||
media_info = item_payload(media)
|
||||
try:
|
||||
media_info = item_payload(media)
|
||||
except UnknownMediaType:
|
||||
return None
|
||||
if media_info.can_expand:
|
||||
media_info.children = []
|
||||
for item in media:
|
||||
media_info.children.append(item_payload(item))
|
||||
try:
|
||||
media_info.children.append(item_payload(item))
|
||||
except UnknownMediaType:
|
||||
continue
|
||||
return media_info
|
||||
|
||||
if media_content_id and ":" in media_content_id:
|
||||
@@ -58,31 +94,54 @@ def browse_media(
|
||||
if special_folder:
|
||||
if media_content_type == "server":
|
||||
library_or_section = plex_server.library
|
||||
children_media_class = MEDIA_CLASS_DIRECTORY
|
||||
title = plex_server.friendly_name
|
||||
elif media_content_type == "library":
|
||||
library_or_section = plex_server.library.sectionByID(media_content_id)
|
||||
title = library_or_section.title
|
||||
try:
|
||||
children_media_class = ITEM_TYPE_MEDIA_CLASS[library_or_section.TYPE]
|
||||
except KeyError as err:
|
||||
raise BrowseError(
|
||||
f"Media not found: {media_content_type} / {media_content_id}"
|
||||
) from err
|
||||
else:
|
||||
raise BrowseError(
|
||||
f"Media not found: {media_content_type} / {media_content_id}"
|
||||
)
|
||||
|
||||
payload = {
|
||||
"title": title,
|
||||
"media_class": MEDIA_CLASS_DIRECTORY,
|
||||
"media_content_id": f"{media_content_id}:{special_folder}",
|
||||
"media_content_type": media_content_type,
|
||||
"can_play": False,
|
||||
"can_expand": True,
|
||||
"children": [],
|
||||
"children_media_class": children_media_class,
|
||||
}
|
||||
|
||||
method = SPECIAL_METHODS[special_folder]
|
||||
items = getattr(library_or_section, method)()
|
||||
for item in items:
|
||||
payload["children"].append(item_payload(item))
|
||||
try:
|
||||
payload["children"].append(item_payload(item))
|
||||
except UnknownMediaType:
|
||||
continue
|
||||
|
||||
return BrowseMedia(**payload)
|
||||
|
||||
if media_content_type in ["server", None]:
|
||||
return server_payload(plex_server)
|
||||
try:
|
||||
if media_content_type in ["server", None]:
|
||||
return server_payload(plex_server)
|
||||
|
||||
if media_content_type == "library":
|
||||
return library_payload(plex_server, media_content_id)
|
||||
if media_content_type == "library":
|
||||
return library_payload(plex_server, media_content_id)
|
||||
|
||||
except UnknownMediaType as err:
|
||||
raise BrowseError(
|
||||
f"Media not found: {media_content_type} / {media_content_id}"
|
||||
) from err
|
||||
|
||||
if media_content_type == "playlists":
|
||||
return playlists_payload(plex_server)
|
||||
@@ -99,8 +158,14 @@ def browse_media(
|
||||
|
||||
def item_payload(item):
|
||||
"""Create response payload for a single media item."""
|
||||
try:
|
||||
media_class = ITEM_TYPE_MEDIA_CLASS[item.type]
|
||||
except KeyError as err:
|
||||
_LOGGER.debug("Unknown type received: %s", item.type)
|
||||
raise UnknownMediaType from err
|
||||
payload = {
|
||||
"title": item.title,
|
||||
"media_class": media_class,
|
||||
"media_content_id": str(item.ratingKey),
|
||||
"media_content_type": item.type,
|
||||
"can_play": True,
|
||||
@@ -114,12 +179,19 @@ def item_payload(item):
|
||||
|
||||
def library_section_payload(section):
|
||||
"""Create response payload for a single library section."""
|
||||
try:
|
||||
children_media_class = ITEM_TYPE_MEDIA_CLASS[section.TYPE]
|
||||
except KeyError as err:
|
||||
_LOGGER.debug("Unknown type received: %s", section.TYPE)
|
||||
raise UnknownMediaType from err
|
||||
return BrowseMedia(
|
||||
title=section.title,
|
||||
media_class=MEDIA_CLASS_DIRECTORY,
|
||||
media_content_id=section.key,
|
||||
media_content_type="library",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children_media_class=children_media_class,
|
||||
)
|
||||
|
||||
|
||||
@@ -128,6 +200,7 @@ def special_library_payload(parent_payload, special_type):
|
||||
title = f"{special_type} ({parent_payload.title})"
|
||||
return BrowseMedia(
|
||||
title=title,
|
||||
media_class=parent_payload.media_class,
|
||||
media_content_id=f"{parent_payload.media_content_id}:{special_type}",
|
||||
media_content_type=parent_payload.media_content_type,
|
||||
can_play=False,
|
||||
@@ -139,12 +212,14 @@ def server_payload(plex_server):
|
||||
"""Create response payload to describe libraries of the Plex server."""
|
||||
server_info = BrowseMedia(
|
||||
title=plex_server.friendly_name,
|
||||
media_class=MEDIA_CLASS_DIRECTORY,
|
||||
media_content_id=plex_server.machine_identifier,
|
||||
media_content_type="server",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
)
|
||||
server_info.children = []
|
||||
server_info.children_media_class = MEDIA_CLASS_DIRECTORY
|
||||
server_info.children.append(special_library_payload(server_info, "On Deck"))
|
||||
server_info.children.append(special_library_payload(server_info, "Recently Added"))
|
||||
for library in plex_server.library.sections():
|
||||
@@ -165,7 +240,10 @@ def library_payload(plex_server, library_id):
|
||||
special_library_payload(library_info, "Recently Added")
|
||||
)
|
||||
for item in library.all():
|
||||
library_info.children.append(item_payload(item))
|
||||
try:
|
||||
library_info.children.append(item_payload(item))
|
||||
except UnknownMediaType:
|
||||
continue
|
||||
return library_info
|
||||
|
||||
|
||||
@@ -173,5 +251,10 @@ def playlists_payload(plex_server):
|
||||
"""Create response payload for all available playlists."""
|
||||
playlists_info = {**PLAYLISTS_BROWSE_PAYLOAD, "children": []}
|
||||
for playlist in plex_server.playlists():
|
||||
playlists_info["children"].append(item_payload(playlist))
|
||||
return BrowseMedia(**playlists_info)
|
||||
try:
|
||||
playlists_info["children"].append(item_payload(playlist))
|
||||
except UnknownMediaType:
|
||||
continue
|
||||
response = BrowseMedia(**playlists_info)
|
||||
response.children_media_class = MEDIA_CLASS_PLAYLIST
|
||||
return response
|
||||
|
||||
homeassistant/components/roku/browse_media.py (new file, 154 additions)
@@ -0,0 +1,154 @@
|
||||
"""Support for media browsing."""
|
||||
|
||||
from homeassistant.components.media_player import BrowseMedia
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_CLASS_APP,
|
||||
MEDIA_CLASS_CHANNEL,
|
||||
MEDIA_CLASS_DIRECTORY,
|
||||
MEDIA_TYPE_APP,
|
||||
MEDIA_TYPE_APPS,
|
||||
MEDIA_TYPE_CHANNEL,
|
||||
MEDIA_TYPE_CHANNELS,
|
||||
)
|
||||
|
||||
CONTENT_TYPE_MEDIA_CLASS = {
|
||||
MEDIA_TYPE_APP: MEDIA_CLASS_APP,
|
||||
MEDIA_TYPE_APPS: MEDIA_CLASS_DIRECTORY,
|
||||
MEDIA_TYPE_CHANNEL: MEDIA_CLASS_CHANNEL,
|
||||
MEDIA_TYPE_CHANNELS: MEDIA_CLASS_DIRECTORY,
|
||||
}
|
||||
|
||||
PLAYABLE_MEDIA_TYPES = [
|
||||
MEDIA_TYPE_APP,
|
||||
MEDIA_TYPE_CHANNEL,
|
||||
]
|
||||
|
||||
EXPANDABLE_MEDIA_TYPES = [
|
||||
MEDIA_TYPE_APPS,
|
||||
MEDIA_TYPE_CHANNELS,
|
||||
]
|
||||
|
||||
|
||||
def build_item_response(coordinator, payload):
|
||||
"""Create response payload for the provided media query."""
|
||||
search_id = payload["search_id"]
|
||||
search_type = payload["search_type"]
|
||||
|
||||
thumbnail = None
|
||||
title = None
|
||||
media = None
|
||||
|
||||
if search_type == MEDIA_TYPE_APPS:
|
||||
title = "Apps"
|
||||
media = [
|
||||
{"app_id": item.app_id, "title": item.name, "type": MEDIA_TYPE_APP}
|
||||
for item in coordinator.data.apps
|
||||
]
|
||||
elif search_type == MEDIA_TYPE_CHANNELS:
|
||||
title = "Channels"
|
||||
media = [
|
||||
{
|
||||
"channel_number": item.number,
|
||||
"title": item.name,
|
||||
"type": MEDIA_TYPE_CHANNEL,
|
||||
}
|
||||
for item in coordinator.data.channels
|
||||
]
|
||||
|
||||
if media is None:
|
||||
return None
|
||||
|
||||
return BrowseMedia(
|
||||
media_class=MEDIA_CLASS_DIRECTORY,
|
||||
media_content_id=search_id,
|
||||
media_content_type=search_type,
|
||||
title=title,
|
||||
can_play=search_type in PLAYABLE_MEDIA_TYPES and search_id,
|
||||
can_expand=True,
|
||||
children=[item_payload(item, coordinator) for item in media],
|
||||
thumbnail=thumbnail,
|
||||
)
|
||||
|
||||
|
||||
def item_payload(item, coordinator):
|
||||
"""
|
||||
Create response payload for a single media item.
|
||||
|
||||
Used by async_browse_media.
|
||||
"""
|
||||
thumbnail = None
|
||||
|
||||
if "app_id" in item:
|
||||
media_content_type = MEDIA_TYPE_APP
|
||||
media_content_id = item["app_id"]
|
||||
thumbnail = coordinator.roku.app_icon_url(item["app_id"])
|
||||
elif "channel_number" in item:
|
||||
media_content_type = MEDIA_TYPE_CHANNEL
|
||||
media_content_id = item["channel_number"]
|
||||
else:
|
||||
media_content_type = item["type"]
|
||||
media_content_id = ""
|
||||
|
||||
title = item["title"]
|
||||
can_play = media_content_type in PLAYABLE_MEDIA_TYPES and media_content_id
|
||||
can_expand = media_content_type in EXPANDABLE_MEDIA_TYPES
|
||||
|
||||
return BrowseMedia(
|
||||
title=title,
|
||||
media_class=CONTENT_TYPE_MEDIA_CLASS[media_content_type],
|
||||
media_content_type=media_content_type,
|
||||
media_content_id=media_content_id,
|
||||
can_play=can_play,
|
||||
can_expand=can_expand,
|
||||
thumbnail=thumbnail,
|
||||
)
|
||||
|
||||
|
||||
def library_payload(coordinator):
|
||||
"""
|
||||
Create response payload to describe contents of a specific library.
|
||||
|
||||
Used by async_browse_media.
|
||||
"""
|
||||
library_info = BrowseMedia(
|
||||
media_class=MEDIA_CLASS_DIRECTORY,
|
||||
media_content_id="library",
|
||||
media_content_type="library",
|
||||
title="Media Library",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children=[],
|
||||
)
|
||||
|
||||
library = {
|
||||
MEDIA_TYPE_APPS: "Apps",
|
||||
MEDIA_TYPE_CHANNELS: "Channels",
|
||||
}
|
||||
|
||||
for item in [{"title": name, "type": type_} for type_, name in library.items()]:
|
||||
if (
|
||||
item["type"] == MEDIA_TYPE_CHANNELS
|
||||
and coordinator.data.info.device_type != "tv"
|
||||
):
|
||||
continue
|
||||
|
||||
library_info.children.append(
|
||||
item_payload(
|
||||
{"title": item["title"], "type": item["type"]},
|
||||
coordinator,
|
||||
)
|
||||
)
|
||||
|
||||
if all(
|
||||
child.media_content_type == MEDIA_TYPE_APPS for child in library_info.children
|
||||
):
|
||||
library_info.children_media_class = MEDIA_CLASS_APP
|
||||
elif all(
|
||||
child.media_content_type == MEDIA_TYPE_CHANNELS
|
||||
for child in library_info.children
|
||||
):
|
||||
library_info.children_media_class = MEDIA_CLASS_CHANNEL
|
||||
else:
|
||||
library_info.children_media_class = MEDIA_CLASS_DIRECTORY
|
||||
|
||||
return library_info
|
||||
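Note (not part of the diff): an illustrative sketch of how the media player is expected to call into this module; the payload keys are the ones build_item_response reads.

    # top level: list the library (Apps, and Channels on a Roku TV)
    root = library_payload(coordinator)

    # drill into a directory; search_type/search_id come from the websocket request
    apps = build_item_response(
        coordinator, {"search_type": MEDIA_TYPE_APPS, "search_id": "apps"}
    )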
@@ -7,14 +7,11 @@ import voluptuous as vol
|
||||
from homeassistant.components.media_player import (
|
||||
DEVICE_CLASS_RECEIVER,
|
||||
DEVICE_CLASS_TV,
|
||||
BrowseMedia,
|
||||
MediaPlayerEntity,
|
||||
)
|
||||
from homeassistant.components.media_player.const import (
|
||||
MEDIA_TYPE_APP,
|
||||
MEDIA_TYPE_APPS,
|
||||
MEDIA_TYPE_CHANNEL,
|
||||
MEDIA_TYPE_CHANNELS,
|
||||
SUPPORT_BROWSE_MEDIA,
|
||||
SUPPORT_NEXT_TRACK,
|
||||
SUPPORT_PAUSE,
|
||||
@@ -39,6 +36,7 @@ from homeassistant.const import (
|
||||
from homeassistant.helpers import entity_platform
|
||||
|
||||
from . import RokuDataUpdateCoordinator, RokuEntity, roku_exception_handler
|
||||
from .browse_media import build_item_response, library_payload
|
||||
from .const import ATTR_KEYWORD, DOMAIN, SERVICE_SEARCH
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -75,41 +73,6 @@ async def async_setup_entry(hass, entry, async_add_entities):
|
||||
)
|
||||
|
||||
|
||||
def browse_media_library(channels: bool = False) -> BrowseMedia:
|
||||
"""Create response payload to describe contents of a specific library."""
|
||||
library_info = BrowseMedia(
|
||||
title="Media Library",
|
||||
media_content_id="library",
|
||||
media_content_type="library",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children=[],
|
||||
)
|
||||
|
||||
library_info.children.append(
|
||||
BrowseMedia(
|
||||
title="Apps",
|
||||
media_content_id="apps",
|
||||
media_content_type=MEDIA_TYPE_APPS,
|
||||
can_expand=True,
|
||||
can_play=False,
|
||||
)
|
||||
)
|
||||
|
||||
if channels:
|
||||
library_info.children.append(
|
||||
BrowseMedia(
|
||||
title="Channels",
|
||||
media_content_id="channels",
|
||||
media_content_type=MEDIA_TYPE_CHANNELS,
|
||||
can_expand=True,
|
||||
can_play=False,
|
||||
)
|
||||
)
|
||||
|
||||
return library_info
|
||||
|
||||
|
||||
class RokuMediaPlayer(RokuEntity, MediaPlayerEntity):
|
||||
"""Representation of a Roku media player on the network."""
|
||||
|
||||
@@ -278,49 +241,13 @@ class RokuMediaPlayer(RokuEntity, MediaPlayerEntity):
|
||||
async def async_browse_media(self, media_content_type=None, media_content_id=None):
|
||||
"""Implement the websocket media browsing helper."""
|
||||
if media_content_type in [None, "library"]:
|
||||
is_tv = self.coordinator.data.info.device_type == "tv"
|
||||
return browse_media_library(channels=is_tv)
|
||||
return library_payload(self.coordinator)
|
||||
|
||||
response = None
|
||||
|
||||
if media_content_type == MEDIA_TYPE_APPS:
|
||||
response = BrowseMedia(
|
||||
title="Apps",
|
||||
media_content_id="apps",
|
||||
media_content_type=MEDIA_TYPE_APPS,
|
||||
can_expand=True,
|
||||
can_play=False,
|
||||
children=[
|
||||
BrowseMedia(
|
||||
title=app.name,
|
||||
thumbnail=self.coordinator.roku.app_icon_url(app.app_id),
|
||||
media_content_id=app.app_id,
|
||||
media_content_type=MEDIA_TYPE_APP,
|
||||
can_play=True,
|
||||
can_expand=False,
|
||||
)
|
||||
for app in self.coordinator.data.apps
|
||||
],
|
||||
)
|
||||
|
||||
if media_content_type == MEDIA_TYPE_CHANNELS:
|
||||
response = BrowseMedia(
|
||||
title="Channels",
|
||||
media_content_id="channels",
|
||||
media_content_type=MEDIA_TYPE_CHANNELS,
|
||||
can_expand=True,
|
||||
can_play=False,
|
||||
children=[
|
||||
BrowseMedia(
|
||||
title=channel.name,
|
||||
media_content_id=channel.number,
|
||||
media_content_type=MEDIA_TYPE_CHANNEL,
|
||||
can_play=True,
|
||||
can_expand=False,
|
||||
)
|
||||
for channel in self.coordinator.data.channels
|
||||
],
|
||||
)
|
||||
payload = {
|
||||
"search_type": media_content_type,
|
||||
"search_id": media_content_id,
|
||||
}
|
||||
response = build_item_response(self.coordinator, payload)
|
||||
|
||||
if response is None:
|
||||
raise BrowseError(
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.const import (
    CONF_ICON,
    CONF_MODE,
    CONF_SEQUENCE,
    CONF_VARIABLES,
    SERVICE_RELOAD,
    SERVICE_TOGGLE,
    SERVICE_TURN_OFF,
@@ -59,6 +60,7 @@ SCRIPT_ENTRY_SCHEMA = make_script_schema(
        vol.Optional(CONF_ICON): cv.icon,
        vol.Required(CONF_SEQUENCE): cv.SCRIPT_SCHEMA,
        vol.Optional(CONF_DESCRIPTION, default=""): cv.string,
        vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA,
        vol.Optional(CONF_FIELDS, default={}): {
            cv.string: {
                vol.Optional(CONF_DESCRIPTION): cv.string,
@@ -75,7 +77,7 @@ CONFIG_SCHEMA = vol.Schema(

SCRIPT_SERVICE_SCHEMA = vol.Schema(dict)
SCRIPT_TURN_ONOFF_SCHEMA = make_entity_service_schema(
    {vol.Optional(ATTR_VARIABLES): dict}
    {vol.Optional(ATTR_VARIABLES): {str: cv.match_all}}
)
RELOAD_SERVICE_SCHEMA = vol.Schema({})

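Note (not part of the diff): with the tightened schema, script.turn_on variables must be a mapping with string keys, while values stay unrestricted via cv.match_all. A hedged example of a service payload that validates:

    SCRIPT_TURN_ONOFF_SCHEMA(
        {"entity_id": "script.example", "variables": {"greeting": "hello", "count": 3}}
    )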
@@ -263,6 +265,7 @@ class ScriptEntity(ToggleEntity):
            max_runs=cfg[CONF_MAX],
            max_exceeded=cfg[CONF_MAX_EXCEEDED],
            logger=logging.getLogger(f"{__name__}.{object_id}"),
            variables=cfg.get(CONF_VARIABLES),
        )
        self._changed = asyncio.Event()

@@ -20,7 +20,7 @@ from homeassistant.helpers import aiohttp_client, device_registry, update_coordi

from .const import DOMAIN

PLATFORMS = ["binary_sensor", "light", "sensor", "switch"]
PLATFORMS = ["binary_sensor", "cover", "light", "sensor", "switch"]
_LOGGER = logging.getLogger(__name__)

@@ -3,6 +3,7 @@ from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASS_GAS,
|
||||
DEVICE_CLASS_MOISTURE,
|
||||
DEVICE_CLASS_OPENING,
|
||||
DEVICE_CLASS_PROBLEM,
|
||||
DEVICE_CLASS_SMOKE,
|
||||
DEVICE_CLASS_VIBRATION,
|
||||
BinarySensorEntity,
|
||||
@@ -15,8 +16,18 @@ from .entity import (
|
||||
)
|
||||
|
||||
SENSORS = {
|
||||
("device", "overtemp"): BlockAttributeDescription(name="overtemp"),
|
||||
("relay", "overpower"): BlockAttributeDescription(name="overpower"),
|
||||
("device", "overtemp"): BlockAttributeDescription(
|
||||
name="Overheating", device_class=DEVICE_CLASS_PROBLEM
|
||||
),
|
||||
("device", "overpower"): BlockAttributeDescription(
|
||||
name="Over Power", device_class=DEVICE_CLASS_PROBLEM
|
||||
),
|
||||
("light", "overpower"): BlockAttributeDescription(
|
||||
name="Over Power", device_class=DEVICE_CLASS_PROBLEM
|
||||
),
|
||||
("relay", "overpower"): BlockAttributeDescription(
|
||||
name="Over Power", device_class=DEVICE_CLASS_PROBLEM
|
||||
),
|
||||
("sensor", "dwIsOpened"): BlockAttributeDescription(
|
||||
name="Door", device_class=DEVICE_CLASS_OPENING
|
||||
),
|
||||
|
||||
homeassistant/components/shelly/cover.py (new file, 104 additions)
@@ -0,0 +1,104 @@
|
||||
"""Cover for Shelly."""
|
||||
from aioshelly import Block
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
SUPPORT_CLOSE,
|
||||
SUPPORT_OPEN,
|
||||
SUPPORT_SET_POSITION,
|
||||
SUPPORT_STOP,
|
||||
CoverEntity,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
|
||||
from . import ShellyDeviceWrapper
|
||||
from .const import DOMAIN
|
||||
from .entity import ShellyBlockEntity
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_entities):
|
||||
"""Set up cover for device."""
|
||||
wrapper = hass.data[DOMAIN][config_entry.entry_id]
|
||||
blocks = [block for block in wrapper.device.blocks if block.type == "roller"]
|
||||
|
||||
if not blocks:
|
||||
return
|
||||
|
||||
async_add_entities(ShellyCover(wrapper, block) for block in blocks)
|
||||
|
||||
|
||||
class ShellyCover(ShellyBlockEntity, CoverEntity):
|
||||
"""Switch that controls a cover block on Shelly devices."""
|
||||
|
||||
def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None:
|
||||
"""Initialize light."""
|
||||
super().__init__(wrapper, block)
|
||||
self.control_result = None
|
||||
self._supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
|
||||
if self.wrapper.device.settings["rollers"][0]["positioning"]:
|
||||
self._supported_features |= SUPPORT_SET_POSITION
|
||||
|
||||
@property
|
||||
def is_closed(self):
|
||||
"""If cover is closed."""
|
||||
if self.control_result:
|
||||
return self.control_result["current_pos"] == 0
|
||||
|
||||
return self.block.rollerPos == 0
|
||||
|
||||
@property
|
||||
def current_cover_position(self):
|
||||
"""Position of the cover."""
|
||||
if self.control_result:
|
||||
return self.control_result["current_pos"]
|
||||
|
||||
return self.block.rollerPos
|
||||
|
||||
@property
|
||||
def is_closing(self):
|
||||
"""Return if the cover is closing."""
|
||||
if self.control_result:
|
||||
return self.control_result["state"] == "close"
|
||||
|
||||
return self.block.roller == "close"
|
||||
|
||||
@property
|
||||
def is_opening(self):
|
||||
"""Return if the cover is opening."""
|
||||
if self.control_result:
|
||||
return self.control_result["state"] == "open"
|
||||
|
||||
return self.block.roller == "open"
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
"""Flag supported features."""
|
||||
return self._supported_features
|
||||
|
||||
async def async_close_cover(self, **kwargs):
|
||||
"""Close cover."""
|
||||
self.control_result = await self.block.set_state(go="close")
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_open_cover(self, **kwargs):
|
||||
"""Open cover."""
|
||||
self.control_result = await self.block.set_state(go="open")
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_cover_position(self, **kwargs):
|
||||
"""Move the cover to a specific position."""
|
||||
self.control_result = await self.block.set_state(
|
||||
go="to_pos", roller_pos=kwargs[ATTR_POSITION]
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_stop_cover(self, **_kwargs):
|
||||
"""Stop the cover."""
|
||||
self.control_result = await self.block.set_state(go="stop")
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _update_callback(self):
|
||||
"""When device updates, clear control result that overrides state."""
|
||||
self.control_result = None
|
||||
super()._update_callback()
|
||||
@@ -2,8 +2,8 @@
  "domain": "shelly",
  "name": "Shelly",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/shelly2",
  "requirements": ["aioshelly==0.3.0"],
  "zeroconf": ["_http._tcp.local."],
  "documentation": "https://www.home-assistant.io/integrations/shelly",
  "requirements": ["aioshelly==0.3.1"],
  "zeroconf": [{ "type": "_http._tcp.local.", "name": "shelly*" }],
  "codeowners": ["@balloob", "@bieniu"]
}

@@ -40,6 +43,43 @@ SENSORS = {
        device_class=sensor.DEVICE_CLASS_POWER,
        default_enabled=False,
    ),
    ("device", "power"): BlockAttributeDescription(
        name="Power",
        unit=POWER_WATT,
        value=lambda value: round(value, 1),
        device_class=sensor.DEVICE_CLASS_POWER,
    ),
    ("emeter", "power"): BlockAttributeDescription(
        name="Power",
        unit=POWER_WATT,
        value=lambda value: round(value, 1),
        device_class=sensor.DEVICE_CLASS_POWER,
    ),
    ("relay", "power"): BlockAttributeDescription(
        name="Power",
        unit=POWER_WATT,
        value=lambda value: round(value, 1),
        device_class=sensor.DEVICE_CLASS_POWER,
    ),
    ("device", "energy"): BlockAttributeDescription(
        name="Energy",
        unit=ENERGY_KILO_WATT_HOUR,
        value=lambda value: round(value / 60 / 1000, 2),
        device_class=sensor.DEVICE_CLASS_ENERGY,
    ),
    ("emeter", "energy"): BlockAttributeDescription(
        name="Energy",
        unit=ENERGY_KILO_WATT_HOUR,
        value=lambda value: round(value / 1000, 2),
        device_class=sensor.DEVICE_CLASS_ENERGY,
    ),
    ("light", "energy"): BlockAttributeDescription(
        name="Energy",
        unit=ENERGY_KILO_WATT_HOUR,
        value=lambda value: round(value / 60 / 1000, 2),
        device_class=sensor.DEVICE_CLASS_ENERGY,
        default_enabled=False,
    ),
    ("relay", "energy"): BlockAttributeDescription(
        name="Energy",
        unit=ENERGY_KILO_WATT_HOUR,
@@ -1,5 +1,5 @@
"""Switch for Shelly."""
from aioshelly import RelayBlock
from aioshelly import Block

from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
@@ -13,6 +13,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up switches for device."""
    wrapper = hass.data[DOMAIN][config_entry.entry_id]

    # In roller mode the relay blocks exist but do not contain required info
    if wrapper.model == "SHSW-25" and wrapper.device.settings["mode"] != "relay":
        return

    relay_blocks = [block for block in wrapper.device.blocks if block.type == "relay"]

    if not relay_blocks:
@@ -24,7 +28,7 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
class RelaySwitch(ShellyBlockEntity, SwitchEntity):
    """Switch that controls a relay block on Shelly devices."""

    def __init__(self, wrapper: ShellyDeviceWrapper, block: RelayBlock) -> None:
    def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None:
        """Initialize relay switch."""
        super().__init__(wrapper, block)
        self.control_result = None
@@ -5,12 +5,13 @@
  "documentation": "https://www.home-assistant.io/integrations/smappee",
  "dependencies": ["http"],
  "requirements": [
    "pysmappee==0.2.10"
    "pysmappee==0.2.13"
  ],
  "codeowners": [
    "@bsmappee"
  ],
  "zeroconf": [
    "_ssh._tcp.local."
    {"type":"_ssh._tcp.local.", "name":"smappee1*"},
    {"type":"_ssh._tcp.local.", "name":"smappee2*"}
  ]
}
@@ -1,34 +1,35 @@
{
    "config": {
        "flow_title": "Smappee: {name}",
        "step": {
            "environment": {
                "description": "Set up your Smappee to integrate with Home Assistant.",
                "data": {
                    "environment": "Environment"
                }
            },
            "local": {
                "description": "Enter the host to initiate the Smappee local integration",
                "data": {
                    "host": "[%key:common::config_flow::data::host%]"
                }
            },
            "zeroconf_confirm": {
                "description": "Do you want to add the Smappee device with serialnumber `{serialnumber}` to Home Assistant?",
                "title": "Discovered Smappee device"
            },
            "pick_implementation": {
                "title": "Pick Authentication Method"
            }
        },
        "abort": {
            "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]",
            "already_configured_local_device": "Local device(s) is already configured. Please remove those first before configuring a cloud device.",
            "authorize_url_timeout": "Timeout generating authorize url.",
            "connection_error": "Failed to connect to Smappee device.",
            "missing_configuration": "The component is not configured. Please follow the documentation.",
            "invalid_mdns": "Unsupported device for the Smappee integration."
  "config": {
    "flow_title": "Smappee: {name}",
    "step": {
      "environment": {
        "description": "Set up your Smappee to integrate with Home Assistant.",
        "data": {
          "environment": "Environment"
        }
      },
      "local": {
        "description": "Enter the host to initiate the Smappee local integration",
        "data": {
          "host": "[%key:common::config_flow::data::host%]"
        }
      },
      "zeroconf_confirm": {
        "description": "Do you want to add the Smappee device with serialnumber `{serialnumber}` to Home Assistant?",
        "title": "Discovered Smappee device"
      },
      "pick_implementation": {
        "title": "Pick Authentication Method"
      }
    },
    "abort": {
      "already_configured_device": "[%key:common::config_flow::abort::already_configured_device%]",
      "already_configured_local_device": "Local device(s) is already configured. Please remove those first before configuring a cloud device.",
      "authorize_url_timeout": "Timeout generating authorize url.",
      "connection_error": "Failed to connect to Smappee device.",
      "missing_configuration": "The component is not configured. Please follow the documentation.",
      "invalid_mdns": "Unsupported device for the Smappee integration.",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
    }
  }
}
@@ -6,7 +6,8 @@
    "abort": {
      "already_setup": "You can only configure one Somfy account.",
      "authorize_url_timeout": "Timeout generating authorize url.",
      "missing_configuration": "The Somfy component is not configured. Please follow the documentation."
      "missing_configuration": "The Somfy component is not configured. Please follow the documentation.",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
    },
    "create_entry": { "default": "Successfully authenticated with Somfy." }
  }
@@ -17,6 +17,14 @@ import voluptuous as vol
from homeassistant.components.media_player import BrowseMedia, MediaPlayerEntity
from homeassistant.components.media_player.const import (
    ATTR_MEDIA_ENQUEUE,
    MEDIA_CLASS_ALBUM,
    MEDIA_CLASS_ARTIST,
    MEDIA_CLASS_COMPOSER,
    MEDIA_CLASS_CONTRIBUTING_ARTIST,
    MEDIA_CLASS_DIRECTORY,
    MEDIA_CLASS_GENRE,
    MEDIA_CLASS_PLAYLIST,
    MEDIA_CLASS_TRACK,
    MEDIA_TYPE_ALBUM,
    MEDIA_TYPE_ARTIST,
    MEDIA_TYPE_COMPOSER,
@@ -103,6 +111,23 @@ EXPANDABLE_MEDIA_TYPES = [
    SONOS_PLAYLISTS,
]

SONOS_TO_MEDIA_CLASSES = {
    SONOS_ALBUM: MEDIA_CLASS_ALBUM,
    SONOS_ALBUM_ARTIST: MEDIA_CLASS_ARTIST,
    SONOS_ARTIST: MEDIA_CLASS_CONTRIBUTING_ARTIST,
    SONOS_COMPOSER: MEDIA_CLASS_COMPOSER,
    SONOS_GENRE: MEDIA_CLASS_GENRE,
    SONOS_PLAYLISTS: MEDIA_CLASS_PLAYLIST,
    SONOS_TRACKS: MEDIA_CLASS_TRACK,
    "object.container.album.musicAlbum": MEDIA_CLASS_ALBUM,
    "object.container.genre.musicGenre": MEDIA_CLASS_PLAYLIST,
    "object.container.person.composer": MEDIA_CLASS_PLAYLIST,
    "object.container.person.musicArtist": MEDIA_CLASS_ARTIST,
    "object.container.playlistContainer.sameArtist": MEDIA_CLASS_ARTIST,
    "object.container.playlistContainer": MEDIA_CLASS_PLAYLIST,
    "object.item.audioItem.musicTrack": MEDIA_CLASS_TRACK,
}

SONOS_TO_MEDIA_TYPES = {
    SONOS_ALBUM: MEDIA_TYPE_ALBUM,
    SONOS_ALBUM_ARTIST: MEDIA_TYPE_ARTIST,
@@ -197,6 +222,10 @@ ATTR_STATUS_LIGHT = "status_light"
UNAVAILABLE_VALUES = {"", "NOT_IMPLEMENTED", None}


class UnknownMediaType(BrowseError):
    """Unknown media type."""


class SonosData:
    """Storage class for platform global data."""

@@ -1462,12 +1491,28 @@ def build_item_response(media_library, payload):
    except IndexError:
        title = LIBRARY_TITLES_MAPPING[payload["idstring"]]

    try:
        media_class = SONOS_TO_MEDIA_CLASSES[
            MEDIA_TYPES_TO_SONOS[payload["search_type"]]
        ]
    except KeyError:
        _LOGGER.debug("Unknown media type received %s", payload["search_type"])
        return None

    children = []
    for item in media:
        try:
            children.append(item_payload(item))
        except UnknownMediaType:
            pass

    return BrowseMedia(
        title=title,
        thumbnail=thumbnail,
        media_class=media_class,
        media_content_id=payload["idstring"],
        media_content_type=payload["search_type"],
        children=[item_payload(item) for item in media],
        children=children,
        can_play=can_play(payload["search_type"]),
        can_expand=can_expand(payload["search_type"]),
    )
@@ -1479,11 +1524,18 @@ def item_payload(item):

    Used by async_browse_media.
    """
    media_type = get_media_type(item)
    try:
        media_class = SONOS_TO_MEDIA_CLASSES[media_type]
    except KeyError as err:
        _LOGGER.debug("Unknown media type received %s", media_type)
        raise UnknownMediaType from err
    return BrowseMedia(
        title=item.title,
        thumbnail=getattr(item, "album_art_uri", None),
        media_class=media_class,
        media_content_id=get_content_id(item),
        media_content_type=SONOS_TO_MEDIA_TYPES[get_media_type(item)],
        media_content_type=SONOS_TO_MEDIA_TYPES[media_type],
        can_play=can_play(item.item_class),
        can_expand=can_expand(item),
    )
@@ -1495,13 +1547,21 @@ def library_payload(media_library):

    Used by async_browse_media.
    """
    children = []
    for item in media_library.browse():
        try:
            children.append(item_payload(item))
        except UnknownMediaType:
            pass

    return BrowseMedia(
        title="Music Library",
        media_class=MEDIA_CLASS_DIRECTORY,
        media_content_id="library",
        media_content_type="library",
        can_play=False,
        can_expand=True,
        children=[item_payload(item) for item in media_library.browse()],
        children=children,
    )


@@ -1565,6 +1625,7 @@ def get_media(media_library, item_id, search_type):
        search_type,
        "/".join(item_id.split("/")[:-1]),
        full_album_art_uri=True,
        max_items=0,
    ):
        if item.item_id == item_id:
            return item
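Aside (not part of the diff, hypothetical data): the browse helpers above share one tolerant pattern — an item whose class cannot be mapped raises UnknownMediaType and is simply skipped, instead of failing the whole browse response. A self-contained sketch of that pattern:

class UnknownMediaType(Exception):
    """Raised when an item class has no known media class."""


MEDIA_CLASSES = {"object.item.audioItem.musicTrack": "track"}


def item_payload(item):
    try:
        return {"title": item["title"], "media_class": MEDIA_CLASSES[item["item_class"]]}
    except KeyError as err:
        raise UnknownMediaType from err


items = [
    {"title": "Song A", "item_class": "object.item.audioItem.musicTrack"},
    {"title": "Mystery", "item_class": "object.container.unknown"},
]
children = []
for item in items:
    try:
        children.append(item_payload(item))
    except UnknownMediaType:
        pass  # drop the item, keep the rest of the listing

assert [child["title"] for child in children] == ["Song A"]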
@@ -11,6 +11,14 @@ from yarl import URL

from homeassistant.components.media_player import BrowseMedia, MediaPlayerEntity
from homeassistant.components.media_player.const import (
    MEDIA_CLASS_ALBUM,
    MEDIA_CLASS_ARTIST,
    MEDIA_CLASS_DIRECTORY,
    MEDIA_CLASS_EPISODE,
    MEDIA_CLASS_GENRE,
    MEDIA_CLASS_PLAYLIST,
    MEDIA_CLASS_PODCAST,
    MEDIA_CLASS_TRACK,
    MEDIA_TYPE_ALBUM,
    MEDIA_TYPE_ARTIST,
    MEDIA_TYPE_EPISODE,
@@ -96,6 +104,35 @@ LIBRARY_MAP = {
    "new_releases": "New Releases",
}

CONTENT_TYPE_MEDIA_CLASS = {
    "current_user_playlists": MEDIA_CLASS_DIRECTORY,
    "current_user_followed_artists": MEDIA_CLASS_DIRECTORY,
    "current_user_saved_albums": MEDIA_CLASS_DIRECTORY,
    "current_user_saved_tracks": MEDIA_CLASS_DIRECTORY,
    "current_user_saved_shows": MEDIA_CLASS_DIRECTORY,
    "current_user_recently_played": MEDIA_CLASS_DIRECTORY,
    "current_user_top_artists": MEDIA_CLASS_DIRECTORY,
    "current_user_top_tracks": MEDIA_CLASS_DIRECTORY,
    "featured_playlists": MEDIA_CLASS_DIRECTORY,
    "categories": MEDIA_CLASS_DIRECTORY,
    "category_playlists": MEDIA_CLASS_DIRECTORY,
    "new_releases": MEDIA_CLASS_DIRECTORY,
    MEDIA_TYPE_PLAYLIST: MEDIA_CLASS_PLAYLIST,
    MEDIA_TYPE_ALBUM: MEDIA_CLASS_ALBUM,
    MEDIA_TYPE_ARTIST: MEDIA_CLASS_ARTIST,
    MEDIA_TYPE_EPISODE: MEDIA_CLASS_EPISODE,
    MEDIA_TYPE_SHOW: MEDIA_CLASS_PODCAST,
    MEDIA_TYPE_TRACK: MEDIA_CLASS_TRACK,
}


class MissingMediaInformation(BrowseError):
    """Missing media required information."""


class UnknownMediaType(BrowseError):
    """Unknown media type."""


async def async_setup_entry(
    hass: HomeAssistant,
@@ -437,16 +474,16 @@ def build_item_response(spotify, user, payload):
        items = media.get("artists", {}).get("items", [])
    elif media_content_type == "current_user_saved_albums":
        media = spotify.current_user_saved_albums(limit=BROWSE_LIMIT)
        items = media.get("items", [])
        items = [item["album"] for item in media.get("items", [])]
    elif media_content_type == "current_user_saved_tracks":
        media = spotify.current_user_saved_tracks(limit=BROWSE_LIMIT)
        items = media.get("items", [])
        items = [item["track"] for item in media.get("items", [])]
    elif media_content_type == "current_user_saved_shows":
        media = spotify.current_user_saved_shows(limit=BROWSE_LIMIT)
        items = media.get("items", [])
        items = [item["show"] for item in media.get("items", [])]
    elif media_content_type == "current_user_recently_played":
        media = spotify.current_user_recently_played(limit=BROWSE_LIMIT)
        items = media.get("items", [])
        items = [item["track"] for item in media.get("items", [])]
    elif media_content_type == "current_user_top_artists":
        media = spotify.current_user_top_artists(limit=BROWSE_LIMIT)
        items = media.get("items", [])
@@ -474,7 +511,7 @@ def build_item_response(spotify, user, payload):
        items = media.get("albums", {}).get("items", [])
    elif media_content_type == MEDIA_TYPE_PLAYLIST:
        media = spotify.playlist(media_content_id)
        items = media.get("tracks", {}).get("items", [])
        items = [item["track"] for item in media.get("tracks", {}).get("items", [])]
    elif media_content_type == MEDIA_TYPE_ALBUM:
        media = spotify.album(media_content_id)
        items = media.get("tracks", {}).get("items", [])
@@ -497,25 +534,41 @@ def build_item_response(spotify, user, payload):
    if media is None:
        return None

    try:
        media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type]
    except KeyError:
        _LOGGER.debug("Unknown media type received: %s", media_content_type)
        return None

    if media_content_type == "categories":
        return BrowseMedia(
        media_item = BrowseMedia(
            title=LIBRARY_MAP.get(media_content_id),
            media_class=media_class,
            media_content_id=media_content_id,
            media_content_type=media_content_type,
            can_play=False,
            can_expand=True,
            children=[
            children=[],
        )
        for item in items:
            try:
                item_id = item["id"]
            except KeyError:
                _LOGGER.debug("Missing id for media item: %s", item)
                continue
            media_item.children.append(
                BrowseMedia(
                    title=item.get("name"),
                    media_content_id=item["id"],
                    media_class=MEDIA_CLASS_PLAYLIST,
                    media_content_id=item_id,
                    media_content_type="category_playlists",
                    thumbnail=fetch_image_url(item, key="icons"),
                    can_play=False,
                    can_expand=True,
                )
                for item in items
            ],
        )
            )
        media_item.children_media_class = MEDIA_CLASS_GENRE
        return media_item

    if title is None:
        if "name" in media:
@@ -523,21 +576,27 @@ def build_item_response(spotify, user, payload):
        else:
            title = LIBRARY_MAP.get(payload["media_content_id"])

    response = {
    params = {
        "title": title,
        "media_class": media_class,
        "media_content_id": media_content_id,
        "media_content_type": media_content_type,
        "can_play": media_content_type in PLAYABLE_MEDIA_TYPES,
        "children": [item_payload(item) for item in items],
        "children": [],
        "can_expand": True,
    }
    for item in items:
        try:
            params["children"].append(item_payload(item))
        except (MissingMediaInformation, UnknownMediaType):
            continue

    if "images" in media:
        response["thumbnail"] = fetch_image_url(media)
        params["thumbnail"] = fetch_image_url(media)
    elif image:
        response["thumbnail"] = image
        params["thumbnail"] = image

    return BrowseMedia(**response)
    return BrowseMedia(**params)


def item_payload(item):
@@ -546,25 +605,30 @@ def item_payload(item):

    Used by async_browse_media.
    """
    if MEDIA_TYPE_TRACK in item:
        item = item[MEDIA_TYPE_TRACK]
    elif MEDIA_TYPE_SHOW in item:
        item = item[MEDIA_TYPE_SHOW]
    elif MEDIA_TYPE_ARTIST in item:
        item = item[MEDIA_TYPE_ARTIST]
    elif MEDIA_TYPE_ALBUM in item and item["type"] != MEDIA_TYPE_TRACK:
        item = item[MEDIA_TYPE_ALBUM]
    try:
        media_type = item["type"]
        media_id = item["uri"]
    except KeyError as err:
        _LOGGER.debug("Missing type or uri for media item: %s", item)
        raise MissingMediaInformation from err

    can_expand = item["type"] not in [
    try:
        media_class = CONTENT_TYPE_MEDIA_CLASS[media_type]
    except KeyError as err:
        _LOGGER.debug("Unknown media type received: %s", media_type)
        raise UnknownMediaType from err

    can_expand = media_type not in [
        MEDIA_TYPE_TRACK,
        MEDIA_TYPE_EPISODE,
    ]

    payload = {
        "title": item.get("name"),
        "media_content_id": item["uri"],
        "media_content_type": item["type"],
        "can_play": item["type"] in PLAYABLE_MEDIA_TYPES,
        "media_class": media_class,
        "media_content_id": media_id,
        "media_content_type": media_type,
        "can_play": media_type in PLAYABLE_MEDIA_TYPES,
        "can_expand": can_expand,
    }

@@ -584,6 +648,7 @@ def library_payload():
    """
    library_info = {
        "title": "Media Library",
        "media_class": MEDIA_CLASS_DIRECTORY,
        "media_content_id": "library",
        "media_content_type": "library",
        "can_play": False,
@@ -597,7 +662,9 @@ def library_payload():
            {"name": item["name"], "type": item["type"], "uri": item["type"]}
        )
    )
    return BrowseMedia(**library_info)
    response = BrowseMedia(**library_info)
    response.children_media_class = MEDIA_CLASS_DIRECTORY
    return response


def fetch_image_url(item, key="images"):
@@ -10,6 +10,7 @@
    "abort": {
      "already_setup": "You can only configure one Spotify account.",
      "authorize_url_timeout": "Timeout generating authorize url.",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
      "missing_configuration": "The Spotify integration is not configured. Please follow the documentation.",
      "reauth_account_mismatch": "The Spotify account you authenticated with does not match the account that needed re-authentication."
    },
@@ -64,11 +64,16 @@ def _stream_worker_internal(hass, stream, quit_event):
        video_stream = container.streams.video[0]
    except (KeyError, IndexError):
        _LOGGER.error("Stream has no video")
        container.close()
        return
    try:
        audio_stream = container.streams.audio[0]
    except (KeyError, IndexError):
        audio_stream = None
    # These formats need aac_adtstoasc bitstream filter, but auto_bsf not
    # compatible with empty_moov and manual bitstream filters not in PyAV
    if container.format.name in {"hls", "mpegts"}:
        audio_stream = None

    # The presentation timestamps of the first packet in each stream we receive
    # Use to adjust before muxing or outputting, but we don't adjust internally
@@ -238,7 +243,7 @@ def _stream_worker_internal(hass, stream, quit_event):

        # Update last_dts processed
        last_dts[packet.stream] = packet.dts
        # mux video packets immediately, save audio packets to be muxed all at once
        # mux packets
        if packet.stream == video_stream:
            mux_video_packet(packet)  # mutates packet timestamps
        else:
@@ -194,3 +194,8 @@ class BinarySensorTemplate(TemplateEntity, BinarySensorEntity):
    def is_on(self):
        """Return true if sensor is on."""
        return self._state

    @property
    def device_class(self):
        """Return the sensor class of the binary sensor."""
        return self._device_class
@@ -249,15 +249,16 @@ class CoverTemplate(TemplateEntity, CoverEntity):
            self._position = None
            return

        if result in _VALID_STATES:
            if result in ("true", STATE_OPEN):
        state = result.lower()
        if state in _VALID_STATES:
            if state in ("true", STATE_OPEN):
                self._position = 100
            else:
                self._position = 0
        else:
            _LOGGER.error(
                "Received invalid cover is_on state: %s. Expected: %s",
                result,
                state,
                ", ".join(_VALID_STATES),
            )
            self._position = None
@@ -412,7 +412,7 @@ class LightTemplate(TemplateEntity, LightEntity):
            self._available = True
            return

        state = str(result).lower()
        state = result.lower()
        if state in _VALID_STATES:
            self._state = state in ("true", STATE_ON)
        else:
@@ -121,6 +121,7 @@ class TemplateEntity(Entity):
        """Template Entity."""
        self._template_attrs = {}
        self._async_update = None
        self._async_update_entity_ids_filter = None
        self._attribute_templates = attribute_templates
        self._attributes = {}
        self._availability_template = availability_template
@@ -231,6 +232,9 @@ class TemplateEntity(Entity):
                event, update.template, update.last_result, update.result
            )

        if self._async_update_entity_ids_filter:
            self._async_update_entity_ids_filter({self.entity_id})

        if self._async_update:
            self.async_write_ha_state()

@@ -245,8 +249,12 @@ class TemplateEntity(Entity):
        )
        self.async_on_remove(result_info.async_remove)
        result_info.async_refresh()
        result_info.async_update_entity_ids_filter({self.entity_id})
        self.async_write_ha_state()
        self._async_update = result_info.async_refresh
        self._async_update_entity_ids_filter = (
            result_info.async_update_entity_ids_filter
        )

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
@@ -17,7 +17,8 @@
      "authorize_url_fail": "Unknown error generating an authorize url.",
      "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "no_agreements": "This account has no Toon displays."
      "no_agreements": "This account has no Toon displays.",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
    }
  }
}
@@ -19,7 +19,8 @@
    "abort": {
      "authorize_url_timeout": "Timeout generating authorize url.",
      "missing_configuration": "The Withings integration is not configured. Please follow the documentation.",
      "already_configured": "Configuration updated for profile."
      "already_configured": "Configuration updated for profile.",
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]"
    },
    "create_entry": { "default": "Successfully authenticated with Withings." }
  }
@@ -1,5 +1,6 @@
"""Support for exposing Home Assistant via Zeroconf."""
import asyncio
import fnmatch
import ipaddress
import logging
import socket
@@ -268,10 +269,26 @@ def setup(hass, config):
            # likely bad homekit data
            return

        for domain in zeroconf_types[service_type]:
        for entry in zeroconf_types[service_type]:
            if len(entry) > 1:
                if "macaddress" in entry:
                    if "properties" not in info:
                        continue
                    if "macaddress" not in info["properties"]:
                        continue
                    if not fnmatch.fnmatch(
                        info["properties"]["macaddress"], entry["macaddress"]
                    ):
                        continue
                if "name" in entry:
                    if "name" not in info:
                        continue
                    if not fnmatch.fnmatch(info["name"], entry["name"]):
                        continue

            hass.add_job(
                hass.config_entries.flow.async_init(
                    domain, context={"source": DOMAIN}, data=info
                    entry["domain"], context={"source": DOMAIN}, data=info
                )
            )
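Aside (not part of the diff): the new matching loop above accepts optional "macaddress" and "name" fnmatch patterns per entry, and only starts a config flow when every given pattern matches the discovered service info. A standalone sketch of that matcher, with made-up service data:

import fnmatch


def entry_matches(entry, info):
    """Return True if the discovery info satisfies every pattern in the entry."""
    if "macaddress" in entry:
        mac = info.get("properties", {}).get("macaddress")
        if mac is None or not fnmatch.fnmatch(mac, entry["macaddress"]):
            return False
    if "name" in entry:
        if "name" not in info or not fnmatch.fnmatch(info["name"], entry["name"]):
            return False
    return True


info = {"name": "shelly1-AABBCC._http._tcp.local.", "properties": {}}
assert entry_matches({"domain": "shelly", "name": "shelly*"}, info)
assert not entry_matches({"domain": "axis", "macaddress": "00408C*"}, info)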
@@ -2,7 +2,7 @@
  "domain": "zeroconf",
  "name": "Zero-configuration networking (zeroconf)",
  "documentation": "https://www.home-assistant.io/integrations/zeroconf",
  "requirements": ["zeroconf==0.28.4"],
  "requirements": ["zeroconf==0.28.5"],
  "dependencies": ["api"],
  "codeowners": ["@Kane610"],
  "quality_scale": "internal"
@@ -9,7 +9,7 @@
    "zha-quirks==0.0.44",
    "zigpy-cc==0.5.2",
    "zigpy-deconz==0.9.2",
    "zigpy==0.23.1",
    "zigpy==0.23.2",
    "zigpy-xbee==0.13.0",
    "zigpy-zigate==0.6.2",
    "zigpy-znp==0.1.1"
@@ -132,11 +132,11 @@ warning_device_warn:
      example: "00:0d:6f:00:05:7d:2d:34"
    mode:
      description: >-
        The Warning Mode field is used as a 4-bit enumeration, can have one of the values defined below in table 8-20 of the ZCL spec. The exact behavior of the WD device in each mode is according to the relevant security standards.
        The Warning Mode field is used as a 4-bit enumeration, can have one of the values 0-6 defined below in table 8-20 of the ZCL spec. The exact behavior of the WD device in each mode is according to the relevant security standards.
      example: 1
    strobe:
      description: >-
        The Strobe field is used as a 2-bit enumeration, and determines if the visual indication is required in addition to the audible siren, as indicated in Table 8-21 of the ZCL spec. If the strobe field is “1” and the Warning Mode is “0” (“Stop”) then only the strobe is activated.
        The Strobe field is used as a 2-bit enumeration, and determines if the visual indication is required in addition to the audible siren, as indicated in Table 8-21 of the ZCL spec. "0" means no strobe, "1" means strobe. If the strobe field is “1” and the Warning Mode is “0” (“Stop”) then only the strobe is activated.
      example: 1
    level:
      description: >-
@@ -144,12 +144,12 @@ warning_device_warn:
      example: 2
    duration:
      description: >-
        Requested duration of warning, in seconds. If both Strobe and Warning Mode are "0" this field SHALL be ignored.
        Requested duration of warning, in seconds (16 bit). If both Strobe and Warning Mode are "0" this field SHALL be ignored.
      example: 2
    duty_cycle:
      description: >-
        Indicates the length of the flash cycle. This provides a means of varying the flash duration for different alarm types (e.g., fire, police, burglar). Valid range is 0-100 in increments of 10. All other values SHALL be rounded to the nearest valid value. Strobe SHALL calculate duty cycle over a duration of one second. The ON state SHALL precede the OFF state. For example, if Strobe Duty Cycle Field specifies “40,” then the strobe SHALL flash ON for 4/10ths of a second and then turn OFF for 6/10ths of a second.
      example: 2
      example: 50
    intensity:
      description: >-
        Indicates the intensity of the strobe as shown in Table 8-23 of the ZCL spec. This attribute is designed to vary the output of the strobe (i.e., brightness) and not its frequency, which is detailed in section 8.4.2.3.1.6 of the ZCL spec.
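Aside (not part of the diff): putting the field descriptions above together, a call to the zha.warning_device_warn service would carry data shaped roughly like the following Python dict; the values are illustrative only and follow the examples given above.

warning_device_warn_data = {
    "ieee": "00:0d:6f:00:05:7d:2d:34",  # device address from the example above
    "mode": 1,         # 4-bit Warning Mode enumeration (0-6, ZCL table 8-20)
    "strobe": 1,       # 2-bit enumeration: 0 = no strobe, 1 = strobe
    "level": 2,        # level field (see the ZCL spec)
    "duration": 2,     # warning duration in seconds (16 bit)
    "duty_cycle": 50,  # strobe duty cycle, 0-100 in steps of 10
    "intensity": 2,    # strobe intensity (ZCL table 8-23)
}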
@@ -1,7 +1,7 @@
"""Constants used by Home Assistant components."""
MAJOR_VERSION = 0
MINOR_VERSION = 115
PATCH_VERSION = "0b0"
PATCH_VERSION = "0b6"
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER = (3, 7, 1)
@@ -179,6 +179,7 @@ CONF_UNTIL = "until"
CONF_URL = "url"
CONF_USERNAME = "username"
CONF_VALUE_TEMPLATE = "value_template"
CONF_VARIABLES = "variables"
CONF_VERIFY_SSL = "verify_ssl"
CONF_WAIT_FOR_TRIGGER = "wait_for_trigger"
CONF_WAIT_TEMPLATE = "wait_template"
@@ -7,72 +7,137 @@ To update, run python3 -m script.hassfest

ZEROCONF = {
    "_Volumio._tcp.local.": [
        "volumio"
        {
            "domain": "volumio"
        }
    ],
    "_api._udp.local.": [
        "guardian"
        {
            "domain": "guardian"
        }
    ],
    "_axis-video._tcp.local.": [
        "axis",
        "doorbird"
        {
            "domain": "axis",
            "macaddress": "00408C*"
        },
        {
            "domain": "axis",
            "macaddress": "ACCC8E*"
        },
        {
            "domain": "axis",
            "macaddress": "B8A44F*"
        },
        {
            "domain": "doorbird",
            "macaddress": "1CCAE3*"
        }
    ],
    "_bond._tcp.local.": [
        "bond"
        {
            "domain": "bond"
        }
    ],
    "_daap._tcp.local.": [
        "forked_daapd"
        {
            "domain": "forked_daapd"
        }
    ],
    "_dkapi._tcp.local.": [
        "daikin"
        {
            "domain": "daikin"
        }
    ],
    "_elg._tcp.local.": [
        "elgato"
        {
            "domain": "elgato"
        }
    ],
    "_esphomelib._tcp.local.": [
        "esphome"
        {
            "domain": "esphome"
        }
    ],
    "_googlecast._tcp.local.": [
        "cast"
        {
            "domain": "cast"
        }
    ],
    "_hap._tcp.local.": [
        "homekit_controller"
        {
            "domain": "homekit_controller"
        }
    ],
    "_http._tcp.local.": [
        "shelly"
        {
            "domain": "shelly",
            "name": "shelly*"
        }
    ],
    "_ipp._tcp.local.": [
        "ipp"
        {
            "domain": "ipp"
        }
    ],
    "_ipps._tcp.local.": [
        "ipp"
        {
            "domain": "ipp"
        }
    ],
    "_miio._udp.local.": [
        "xiaomi_aqara",
        "xiaomi_miio"
        {
            "domain": "xiaomi_aqara"
        },
        {
            "domain": "xiaomi_miio"
        }
    ],
    "_nut._tcp.local.": [
        "nut"
        {
            "domain": "nut"
        }
    ],
    "_plugwise._tcp.local.": [
        "plugwise"
        {
            "domain": "plugwise"
        }
    ],
    "_printer._tcp.local.": [
        "brother"
        {
            "domain": "brother",
            "name": "brother*"
        }
    ],
    "_spotify-connect._tcp.local.": [
        "spotify"
        {
            "domain": "spotify"
        }
    ],
    "_ssh._tcp.local.": [
        "smappee"
        {
            "domain": "smappee",
            "name": "smappee1*"
        },
        {
            "domain": "smappee",
            "name": "smappee2*"
        }
    ],
    "_viziocast._tcp.local.": [
        "vizio"
        {
            "domain": "vizio"
        }
    ],
    "_wled._tcp.local.": [
        "wled"
        {
            "domain": "wled"
        }
    ],
    "_xbmc-jsonrpc-h._tcp.local.": [
        "kodi"
        {
            "domain": "kodi"
        }
    ]
}
@@ -21,7 +21,7 @@ from yarl import URL
from homeassistant import config_entries
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.network import get_url
from homeassistant.helpers.network import NoURLAvailableError, get_url

from .aiohttp_client import async_get_clientsession

@@ -251,6 +251,13 @@ class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta):
            url = await self.flow_impl.async_generate_authorize_url(self.flow_id)
        except asyncio.TimeoutError:
            return self.async_abort(reason="authorize_url_timeout")
        except NoURLAvailableError:
            return self.async_abort(
                reason="no_url_available",
                description_placeholders={
                    "docs_url": "https://www.home-assistant.io/more-info/no-url-available"
                },
            )

        url = str(URL(url).update_query(self.extra_authorize_data))
@@ -67,6 +67,7 @@ from homeassistant.const import (
    CONF_UNIT_SYSTEM_METRIC,
    CONF_UNTIL,
    CONF_VALUE_TEMPLATE,
    CONF_VARIABLES,
    CONF_WAIT_FOR_TRIGGER,
    CONF_WAIT_TEMPLATE,
    CONF_WHILE,
@@ -81,7 +82,10 @@ from homeassistant.const import (
)
from homeassistant.core import split_entity_id, valid_entity_id
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template as template_helper
from homeassistant.helpers import (
    script_variables as script_variables_helper,
    template as template_helper,
)
from homeassistant.helpers.logging import KeywordStyleAdapter
from homeassistant.util import slugify as util_slugify
import homeassistant.util.dt as dt_util
@@ -863,6 +867,13 @@ def make_entity_service_schema(
    )


SCRIPT_VARIABLES_SCHEMA = vol.All(
    vol.Schema({str: template_complex}),
    # pylint: disable=unnecessary-lambda
    lambda val: script_variables_helper.ScriptVariables(val),
)


def script_action(value: Any) -> dict:
    """Validate a script action."""
    if not isinstance(value, dict):
@@ -1117,6 +1128,13 @@ _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA = vol.Schema(
    }
)

_SCRIPT_SET_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_ALIAS): string,
        vol.Required(CONF_VARIABLES): SCRIPT_VARIABLES_SCHEMA,
    }
)

SCRIPT_ACTION_DELAY = "delay"
SCRIPT_ACTION_WAIT_TEMPLATE = "wait_template"
SCRIPT_ACTION_CHECK_CONDITION = "condition"
@@ -1127,6 +1145,7 @@ SCRIPT_ACTION_ACTIVATE_SCENE = "scene"
SCRIPT_ACTION_REPEAT = "repeat"
SCRIPT_ACTION_CHOOSE = "choose"
SCRIPT_ACTION_WAIT_FOR_TRIGGER = "wait_for_trigger"
SCRIPT_ACTION_VARIABLES = "variables"


def determine_script_action(action: dict) -> str:
@@ -1158,6 +1177,9 @@ def determine_script_action(action: dict) -> str:
    if CONF_WAIT_FOR_TRIGGER in action:
        return SCRIPT_ACTION_WAIT_FOR_TRIGGER

    if CONF_VARIABLES in action:
        return SCRIPT_ACTION_VARIABLES

    return SCRIPT_ACTION_CALL_SERVICE


@@ -1172,4 +1194,5 @@ ACTION_TYPE_SCHEMAS: Dict[str, Callable[[Any], dict]] = {
    SCRIPT_ACTION_REPEAT: _SCRIPT_REPEAT_SCHEMA,
    SCRIPT_ACTION_CHOOSE: _SCRIPT_CHOOSE_SCHEMA,
    SCRIPT_ACTION_WAIT_FOR_TRIGGER: _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA,
    SCRIPT_ACTION_VARIABLES: _SCRIPT_SET_SCHEMA,
}
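Aside (not part of the diff): the data shape the new "variables" action accepts — routed by determine_script_action() to SCRIPT_ACTION_VARIABLES and validated by _SCRIPT_SET_SCHEMA — looks like this; the values are illustrative.

variables_action = {
    "alias": "Remember the trigger",                      # optional CONF_ALIAS
    "variables": {                                        # required CONF_VARIABLES
        "greeting": "Hello {{ trigger.to_state.name }}",  # templates are allowed
        "count": 3,                                       # plain values too
    },
}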
@@ -595,3 +595,19 @@ class EntityPlatform:
current_platform: ContextVar[Optional[EntityPlatform]] = ContextVar(
    "current_platform", default=None
)


@callback
def async_get_platforms(
    hass: HomeAssistantType, integration_name: str
) -> List[EntityPlatform]:
    """Find existing platforms."""
    if (
        DATA_ENTITY_PLATFORM not in hass.data
        or integration_name not in hass.data[DATA_ENTITY_PLATFORM]
    ):
        return []

    platforms: List[EntityPlatform] = hass.data[DATA_ENTITY_PLATFORM][integration_name]

    return platforms
@@ -508,6 +508,7 @@ class _TrackTemplateResultInfo:
        self._info: Dict[Template, RenderInfo] = {}
        self._last_domains: Set = set()
        self._last_entities: Set = set()
        self._entity_ids_filter: Set = set()

    def async_setup(self) -> None:
        """Activation of template tracking."""
@@ -659,12 +660,27 @@ class _TrackTemplateResultInfo:
        """Force recalculate the template."""
        self._refresh(None)

    @callback
    def async_update_entity_ids_filter(self, entity_ids: Set) -> None:
        """Update the filtered entity_ids."""
        self._entity_ids_filter = entity_ids

    @callback
    def _refresh(self, event: Optional[Event]) -> None:
        entity_id = event and event.data.get(ATTR_ENTITY_ID)
        updates = []
        info_changed = False

        if entity_id and entity_id in self._entity_ids_filter:
            # Skip self-referencing updates
            for track_template_ in self._track_templates:
                _LOGGER.warning(
                    "Template loop detected while processing event: %s, skipping template render for Template[%s]",
                    event,
                    track_template_.template.template,
                )
            return

        for track_template_ in self._track_templates:
            template = track_template_.template
            if (
@@ -75,6 +75,38 @@ def get_url(
        except NoURLAvailableError:
            pass

    # For current request, we accept loopback interfaces (e.g., 127.0.0.1),
    # the Supervisor hostname and localhost transparently
    request_host = _get_request_host()
    if (
        require_current_request
        and request_host is not None
        and hass.config.api is not None
    ):
        scheme = "https" if hass.config.api.use_ssl else "http"
        current_url = yarl.URL.build(
            scheme=scheme, host=request_host, port=hass.config.api.port
        )

        known_hostname = None
        if hass.components.hassio.is_hassio():
            host_info = hass.components.hassio.get_host_info()
            known_hostname = f"{host_info['hostname']}.local"

        if (
            (
                (
                    allow_ip
                    and is_ip_address(request_host)
                    and is_loopback(ip_address(request_host))
                )
                or request_host in ["localhost", known_hostname]
            )
            and (not require_ssl or current_url.scheme == "https")
            and (not require_standard_port or current_url.is_default_port())
        ):
            return normalize_url(str(current_url))

    # We have to be honest now, we have no viable option available
    raise NoURLAvailableError
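Aside (not part of the diff): the nested condition above boils down to a small host check — loopback IPs, "localhost" and the Supervisor hostname are acceptable for the current request. A simplified standalone sketch of just that host check (the allow_ip, SSL and port gates are omitted):

from ipaddress import ip_address
from typing import Optional


def acceptable_request_host(host: str, known_hostname: Optional[str] = None) -> bool:
    """Mirror the host portion of the check above."""
    try:
        if ip_address(host).is_loopback:
            return True
    except ValueError:
        pass  # not an IP address at all
    return host in ("localhost", known_hostname)


assert acceptable_request_host("127.0.0.1")
assert acceptable_request_host("homeassistant.local", "homeassistant.local")
assert not acceptable_request_host("example.com")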
@@ -9,7 +9,7 @@ from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import Event, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.entity_platform import DATA_ENTITY_PLATFORM, EntityPlatform
from homeassistant.helpers.entity_platform import EntityPlatform, async_get_platforms
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
@@ -141,13 +141,7 @@ def async_get_platform(
    hass: HomeAssistantType, integration_name: str, integration_platform_name: str
) -> Optional[EntityPlatform]:
    """Find an existing platform."""
    if (
        DATA_ENTITY_PLATFORM not in hass.data
        or integration_name not in hass.data[DATA_ENTITY_PLATFORM]
    ):
        return None

    for integration_platform in hass.data[DATA_ENTITY_PLATFORM][integration_name]:
    for integration_platform in async_get_platforms(hass, integration_name):
        if integration_platform.domain == integration_platform_name:
            platform: EntityPlatform = integration_platform
            return platform
@@ -1,6 +1,5 @@
"""Helpers to execute scripts."""
import asyncio
from copy import deepcopy
from datetime import datetime, timedelta
from functools import partial
import itertools
@@ -47,6 +46,7 @@ from homeassistant.const import (
    CONF_SEQUENCE,
    CONF_TIMEOUT,
    CONF_UNTIL,
    CONF_VARIABLES,
    CONF_WAIT_FOR_TRIGGER,
    CONF_WAIT_TEMPLATE,
    CONF_WHILE,
@@ -54,12 +54,9 @@ from homeassistant.const import (
    SERVICE_TURN_ON,
)
from homeassistant.core import SERVICE_CALL_LIMIT, Context, HomeAssistant, callback
from homeassistant.helpers import (
    condition,
    config_validation as cv,
    template as template,
)
from homeassistant.helpers import condition, config_validation as cv, template
from homeassistant.helpers.event import async_call_later, async_track_template
from homeassistant.helpers.script_variables import ScriptVariables
from homeassistant.helpers.service import (
    CONF_SERVICE_DATA,
    async_prepare_call_from_config,
@@ -572,7 +569,7 @@ class _ScriptRun:
            "" if delay is None else f" (timeout: {timedelta(seconds=delay)})",
        )

        variables = deepcopy(self._variables)
        variables = {**self._variables}
        self._variables["wait"] = {"remaining": delay, "trigger": None}

        async def async_done(variables, context=None):
@@ -616,6 +613,14 @@ class _ScriptRun:
            task.cancel()
            remove_triggers()

    async def _async_variables_step(self):
        """Set a variable value."""
        self._script.last_action = self._action.get(CONF_ALIAS, "setting variables")
        self._log("Executing step %s", self._script.last_action)
        self._variables = self._action[CONF_VARIABLES].async_render(
            self._hass, self._variables, render_as_defaults=False
        )

    async def _async_run_script(self, script):
        """Execute a script."""
        await self._async_run_long_action(
@@ -722,6 +727,7 @@ class Script:
        logger: Optional[logging.Logger] = None,
        log_exceptions: bool = True,
        top_level: bool = True,
        variables: Optional[ScriptVariables] = None,
    ) -> None:
        """Initialize the script."""
        all_scripts = hass.data.get(DATA_SCRIPTS)
@@ -760,6 +766,10 @@ class Script:
        self._choose_data: Dict[int, Dict[str, Any]] = {}
        self._referenced_entities: Optional[Set[str]] = None
        self._referenced_devices: Optional[Set[str]] = None
        self.variables = variables
        self._variables_dynamic = template.is_complex(variables)
        if self._variables_dynamic:
            template.attach(hass, variables)

    def _set_logger(self, logger: Optional[logging.Logger] = None) -> None:
        if logger:
@@ -868,7 +878,7 @@ class Script:

    async def async_run(
        self,
        variables: Optional[_VarsType] = None,
        run_variables: Optional[_VarsType] = None,
        context: Optional[Context] = None,
        started_action: Optional[Callable[..., Any]] = None,
    ) -> None:
@@ -899,8 +909,23 @@ class Script:
        # are read-only, but more importantly, so as not to leak any variables created
        # during the run back to the caller.
        if self._top_level:
            variables = dict(variables) if variables is not None else {}
            if self.variables:
                try:
                    variables = self.variables.async_render(
                        self._hass,
                        run_variables,
                    )
                except template.TemplateError as err:
                    self._log("Error rendering variables: %s", err, level=logging.ERROR)
                    raise
            elif run_variables:
                variables = dict(run_variables)
            else:
                variables = {}

            variables["context"] = context
        else:
            variables = cast(dict, run_variables)

        if self.script_mode != SCRIPT_MODE_QUEUED:
            cls = _ScriptRun
homeassistant/helpers/script_variables.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""Script variables."""
from typing import Any, Dict, Mapping, Optional

from homeassistant.core import HomeAssistant, callback

from . import template


class ScriptVariables:
    """Class to hold and render script variables."""

    def __init__(self, variables: Dict[str, Any]):
        """Initialize script variables."""
        self.variables = variables
        self._has_template: Optional[bool] = None

    @callback
    def async_render(
        self,
        hass: HomeAssistant,
        run_variables: Optional[Mapping[str, Any]],
        *,
        render_as_defaults: bool = True,
    ) -> Dict[str, Any]:
        """Render script variables.

        The run variables are used to compute the static variables.

        If `render_as_defaults` is True, the run variables will not be overridden.

        """
        if self._has_template is None:
            self._has_template = template.is_complex(self.variables)
            template.attach(hass, self.variables)

        if not self._has_template:
            if render_as_defaults:
                rendered_variables = dict(self.variables)

                if run_variables is not None:
                    rendered_variables.update(run_variables)
            else:
                rendered_variables = (
                    {} if run_variables is None else dict(run_variables)
                )
                rendered_variables.update(self.variables)

            return rendered_variables

        rendered_variables = {} if run_variables is None else dict(run_variables)

        for key, value in self.variables.items():
            # We can skip if we're going to override this key with
            # run variables anyway
            if render_as_defaults and key in rendered_variables:
                continue

            rendered_variables[key] = template.render_complex(value, rendered_variables)

        return rendered_variables

    def as_dict(self) -> dict:
        """Return dict version of this class."""
        return self.variables
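Aside (not part of the diff): for plain, non-template values the class above reduces to two merge orders. A minimal standalone sketch of those semantics — it mirrors the logic and does not import Home Assistant:

def render_as_defaults(script_vars, run_vars):
    # Script-level variables act as defaults; run variables win on conflict.
    rendered = dict(script_vars)
    rendered.update(run_vars or {})
    return rendered


def render_as_overrides(script_vars, run_vars):
    # With render_as_defaults=False (the "variables" step), script variables win.
    rendered = dict(run_vars or {})
    rendered.update(script_vars)
    return rendered


assert render_as_defaults({"x": 1}, {"x": 2}) == {"x": 2}
assert render_as_overrides({"x": 1}, {"x": 2}) == {"x": 1}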
@@ -65,7 +65,7 @@ def attach(hass: HomeAssistantType, obj: Any) -> None:
    if isinstance(obj, list):
        for child in obj:
            attach(hass, child)
    elif isinstance(obj, dict):
    elif isinstance(obj, collections.abc.Mapping):
        for child_key, child_value in obj.items():
            attach(hass, child_key)
            attach(hass, child_value)
@@ -77,7 +77,7 @@ def render_complex(value: Any, variables: TemplateVarsType = None) -> Any:
    """Recursive template creator helper function."""
    if isinstance(value, list):
        return [render_complex(item, variables) for item in value]
    if isinstance(value, dict):
    if isinstance(value, collections.abc.Mapping):
        return {
            render_complex(key, variables): render_complex(item, variables)
            for key, item in value.items()
@@ -88,6 +88,19 @@ def render_complex(value: Any, variables: TemplateVarsType = None) -> Any:
    return value


def is_complex(value: Any) -> bool:
    """Test if data structure is a complex template."""
    if isinstance(value, Template):
        return True
    if isinstance(value, list):
        return any(is_complex(val) for val in value)
    if isinstance(value, collections.abc.Mapping):
        return any(is_complex(val) for val in value.keys()) or any(
            is_complex(val) for val in value.values()
        )
    return False


def is_template_string(maybe_template: str) -> bool:
    """Check if the input is a Jinja2 template."""
    return _RE_JINJA_DELIMITERS.search(maybe_template) is not None
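Aside (not part of the diff): the switch from dict to collections.abc.Mapping above matters for read-only mappings, which are Mappings but not dict subclasses. A quick illustration:

import collections.abc
from types import MappingProxyType

value = MappingProxyType({"brightness": 255})
assert not isinstance(value, dict)                 # the old check would skip it
assert isinstance(value, collections.abc.Mapping)  # the new check walks into it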
@@ -1,5 +1,5 @@
"""Typing Helpers for Home Assistant."""
from typing import Any, Dict, Optional, Tuple, Union
from typing import Any, Dict, Mapping, Optional, Tuple, Union

import homeassistant.core

@@ -12,7 +12,7 @@ HomeAssistantType = homeassistant.core.HomeAssistant
ServiceCallType = homeassistant.core.ServiceCall
ServiceDataType = Dict[str, Any]
StateType = Union[None, str, int, float]
TemplateVarsType = Optional[Dict[str, Any]]
TemplateVarsType = Optional[Mapping[str, Any]]

# Custom type for recorder Queries
QueryType = Any
@@ -145,18 +145,25 @@ async def async_get_config_flows(hass: "HomeAssistant") -> Set[str]:
    return flows


async def async_get_zeroconf(hass: "HomeAssistant") -> Dict[str, List]:
async def async_get_zeroconf(hass: "HomeAssistant") -> Dict[str, List[Dict[str, str]]]:
    """Return cached list of zeroconf types."""
    zeroconf: Dict[str, List] = ZEROCONF.copy()
    zeroconf: Dict[str, List[Dict[str, str]]] = ZEROCONF.copy()

    integrations = await async_get_custom_components(hass)
    for integration in integrations.values():
        if not integration.zeroconf:
            continue
        for typ in integration.zeroconf:
            zeroconf.setdefault(typ, [])
            if integration.domain not in zeroconf[typ]:
                zeroconf[typ].append(integration.domain)
        for entry in integration.zeroconf:
            data = {"domain": integration.domain}
            if isinstance(entry, dict):
                typ = entry["type"]
                entry_without_type = entry.copy()
                del entry_without_type["type"]
                data.update(entry_without_type)
            else:
                typ = entry

            zeroconf.setdefault(typ, []).append(data)

    return zeroconf
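Aside (not part of the diff): the loop above normalizes both manifest styles — bare service-type strings and dicts with extra matcher keys — into per-type lists of matcher dicts. A standalone sketch of that normalization with a made-up manifest entry:

def normalized(domain, manifest_zeroconf):
    """Yield (service_type, matcher) pairs for one integration manifest."""
    for entry in manifest_zeroconf:
        data = {"domain": domain}
        if isinstance(entry, dict):
            typ = entry["type"]
            data.update({key: value for key, value in entry.items() if key != "type"})
        else:
            typ = entry
        yield typ, data


zeroconf = {}
for typ, data in normalized("shelly", [{"type": "_http._tcp.local.", "name": "shelly*"}]):
    zeroconf.setdefault(typ, []).append(data)

assert zeroconf == {"_http._tcp.local.": [{"domain": "shelly", "name": "shelly*"}]}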
@@ -12,8 +12,8 @@ cryptography==2.9.2
defusedxml==0.6.0
distro==1.5.0
emoji==0.5.4
hass-nabucasa==0.36.1
home-assistant-frontend==20200907.0
hass-nabucasa==0.37.0
home-assistant-frontend==20200909.0
importlib-metadata==1.6.0;python_version<'3.8'
jinja2>=2.11.2
netdisco==2.8.2
@@ -29,7 +29,7 @@ sqlalchemy==1.3.19
voluptuous-serialize==2.4.0
voluptuous==0.11.7
yarl==1.4.2
zeroconf==0.28.4
zeroconf==0.28.5

pycryptodome>=3.6.6
Some files were not shown because too many files have changed in this diff.