Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-21 23:17:00 +01:00

Compare commits
9 commits: dev...frontend-p
| Author | SHA1 | Date |
|---|---|---|
|  | 698412a4d3 |  |
|  | 20d14a6e00 |  |
|  | 900c2f881e |  |
|  | 2cf406f66c |  |
|  | 7bba6f0e26 |  |
|  | 460fa5a052 |  |
|  | bebc6a63f1 |  |
|  | 689415d021 |  |
|  | 07de504f54 |  |
7 .github/workflows/ci.yaml vendored
@@ -1187,8 +1185,6 @@ jobs:
          - pytest-postgres
          - pytest-mariadb
    timeout-minutes: 10
    permissions:
      id-token: write
    # codecov/test-results-action currently doesn't support tokenless uploads
    # therefore we can't run it on forks
    if: |
@@ -1200,9 +1198,8 @@
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
        uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
        uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
        with:
          report_type: test_results
          fail_ci_if_error: true
          verbose: true
          use_oidc: true
          token: ${{ secrets.CODECOV_TOKEN }}

@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.14.13
    rev: v0.13.0
    hooks:
      - id: ruff-check
        args:
@@ -52,7 +52,7 @@ class AdGuardHomeEntity(Entity):
    def device_info(self) -> DeviceInfo:
        """Return device information about this AdGuard Home instance."""
        if self._entry.source == SOURCE_HASSIO:
            config_url = "homeassistant://app/a0d7b954_adguard"
            config_url = "homeassistant://hassio/ingress/a0d7b954_adguard"
        elif self.adguard.tls:
            config_url = f"https://{self.adguard.host}:{self.adguard.port}"
        else:
@@ -14,7 +14,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is armed"
      "name": "If an alarm is armed"
    },
    "is_armed_away": {
      "description": "Tests if one or more alarms are armed in away mode.",
@@ -24,7 +24,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is armed away"
      "name": "If an alarm is armed away"
    },
    "is_armed_home": {
      "description": "Tests if one or more alarms are armed in home mode.",
@@ -34,7 +34,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is armed home"
      "name": "If an alarm is armed home"
    },
    "is_armed_night": {
      "description": "Tests if one or more alarms are armed in night mode.",
@@ -44,7 +44,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is armed night"
      "name": "If an alarm is armed night"
    },
    "is_armed_vacation": {
      "description": "Tests if one or more alarms are armed in vacation mode.",
@@ -54,7 +54,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is armed vacation"
      "name": "If an alarm is armed vacation"
    },
    "is_disarmed": {
      "description": "Tests if one or more alarms are disarmed.",
@@ -64,7 +64,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is disarmed"
      "name": "If an alarm is disarmed"
    },
    "is_triggered": {
      "description": "Tests if one or more alarms are triggered.",
@@ -74,7 +74,7 @@
          "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
        }
      },
      "name": "Alarm is triggered"
      "name": "If an alarm is triggered"
    }
  },
  "device_automation": {
@@ -5,14 +5,9 @@ from __future__ import annotations

import asyncio
import logging
from random import randrange
import sys
from typing import Any, cast

from pyatv import connect, exceptions, scan
from pyatv.conf import AppleTV
from pyatv.const import DeviceModel, Protocol
from pyatv.convert import model_str
from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener

from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -29,7 +24,11 @@ from homeassistant.const import (
    Platform,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import (
    ConfigEntryAuthFailed,
    ConfigEntryNotReady,
    HomeAssistantError,
)
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -43,6 +42,18 @@ from .const import (
    SIGNAL_DISCONNECTED,
)

if sys.version_info < (3, 14):
    from pyatv import connect, exceptions, scan
    from pyatv.conf import AppleTV
    from pyatv.const import DeviceModel, Protocol
    from pyatv.convert import model_str
    from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
else:

    class DeviceListener:
        """Dummy class."""


_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME_TV = "Apple TV"
@@ -53,25 +64,30 @@ BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes

PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]

AUTH_EXCEPTIONS = (
    exceptions.AuthenticationError,
    exceptions.InvalidCredentialsError,
    exceptions.NoCredentialsError,
)
CONNECTION_TIMEOUT_EXCEPTIONS = (
    OSError,
    asyncio.CancelledError,
    TimeoutError,
    exceptions.ConnectionLostError,
    exceptions.ConnectionFailedError,
)
DEVICE_EXCEPTIONS = (
    exceptions.ProtocolError,
    exceptions.NoServiceError,
    exceptions.PairingError,
    exceptions.BackOffError,
    exceptions.DeviceIdMissingError,
)
if sys.version_info < (3, 14):
    AUTH_EXCEPTIONS = (
        exceptions.AuthenticationError,
        exceptions.InvalidCredentialsError,
        exceptions.NoCredentialsError,
    )
    CONNECTION_TIMEOUT_EXCEPTIONS = (
        OSError,
        asyncio.CancelledError,
        TimeoutError,
        exceptions.ConnectionLostError,
        exceptions.ConnectionFailedError,
    )
    DEVICE_EXCEPTIONS = (
        exceptions.ProtocolError,
        exceptions.NoServiceError,
        exceptions.PairingError,
        exceptions.BackOffError,
        exceptions.DeviceIdMissingError,
    )
else:
    AUTH_EXCEPTIONS = ()
    CONNECTION_TIMEOUT_EXCEPTIONS = ()
    DEVICE_EXCEPTIONS = ()


type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
@@ -79,6 +95,10 @@ type AppleTvConfigEntry = ConfigEntry[AppleTVManager]

async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
    """Set up a config entry for Apple TV."""
    if sys.version_info >= (3, 14):
        raise HomeAssistantError(
            "Apple TV is not supported on Python 3.14. Please use Python 3.13."
        )
    manager = AppleTVManager(hass, entry)

    if manager.is_on:
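The version-gated branch of this hunk only works because the empty fallback tuples keep the module importable: in Python, an `except` over an empty tuple matches nothing, so exception handlers silently become no-ops instead of raising `NameError`. A standalone sketch of that mechanism (illustration only; not the integration's real exception classes):

```python
# Why the empty tuples work on 3.14: `except` with an empty tuple matches
# nothing, so handlers degrade to no-ops instead of failing at import time.
import sys

if sys.version_info < (3, 14):
    AUTH_EXCEPTIONS: tuple[type[BaseException], ...] = (PermissionError,)
else:
    AUTH_EXCEPTIONS = ()  # `except ()` never matches


def try_connect() -> str:
    try:
        raise PermissionError("bad credentials")
    except AUTH_EXCEPTIONS:
        return "auth failed"
    # On 3.14 the PermissionError propagates instead of being caught


print(try_connect())  # "auth failed" on Python < 3.14
```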
@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyatv", "srptools"],
  "requirements": ["pyatv==0.17.0"],
  "requirements": ["pyatv==0.16.1;python_version<'3.14'"],
  "zeroconf": [
    "_mediaremotetv._tcp.local.",
    "_companion-link._tcp.local.",
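The `;python_version<'3.14'` suffix in the requirement is a standard PEP 508 environment marker: the dependency is simply skipped on Python 3.14, which is why the module-level guards in the previous hunk are needed at all. A quick way to see how such a marker evaluates, using the `packaging` library (pip vendors it; it may need installing separately):

```python
# Evaluate the same environment marker that appears in the manifest
# requirement string above.
from packaging.markers import Marker

marker = Marker("python_version < '3.14'")
print(marker.evaluate())  # True on Python 3.13 and earlier, False on 3.14+
```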
@@ -239,15 +239,6 @@ class AppleTvMediaPlayer(
        """
        self.async_write_ha_state()

    @callback
    def volume_device_update(
        self, output_device: OutputDevice, old_level: float, new_level: float
    ) -> None:
        """Output device volume was updated.

        This is a callback function from pyatv.interface.AudioListener.
        """

    @callback
    def outputdevices_update(
        self, old_devices: list[OutputDevice], new_devices: list[OutputDevice]
@@ -14,7 +14,7 @@
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
        }
      },
      "name": "Satellite is idle"
      "name": "If a satellite is idle"
    },
    "is_listening": {
      "description": "Tests if one or more Assist satellites are listening.",
@@ -24,7 +24,7 @@
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
        }
      },
      "name": "Satellite is listening"
      "name": "If a satellite is listening"
    },
    "is_processing": {
      "description": "Tests if one or more Assist satellites are processing.",
@@ -34,7 +34,7 @@
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
        }
      },
      "name": "Satellite is processing"
      "name": "If a satellite is processing"
    },
    "is_responding": {
      "description": "Tests if one or more Assist satellites are responding.",
@@ -44,7 +44,7 @@
          "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
        }
      },
      "name": "Satellite is responding"
      "name": "If a satellite is responding"
    }
  },
  "entity_component": {
@@ -56,7 +56,7 @@ from homeassistant.core import (
    valid_entity_id,
)
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
from homeassistant.helpers import condition as condition_helper, config_validation as cv
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.issue_registry import (
@@ -127,7 +126,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
    "assist_satellite",
    "fan",
    "light",
    "siren",
}

_EXPERIMENTAL_TRIGGER_PLATFORMS = {
@@ -554,7 +553,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        automation_id: str | None,
        name: str,
        trigger_config: list[ConfigType],
        condition: IfAction | None,
        cond_func: IfAction | None,
        action_script: Script,
        initial_state: bool | None,
        variables: ScriptVariables | None,
@@ -567,7 +566,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        self._attr_name = name
        self._trigger_config = trigger_config
        self._async_detach_triggers: CALLBACK_TYPE | None = None
        self._condition = condition
        self._cond_func = cond_func
        self.action_script = action_script
        self.action_script.change_listener = self.async_write_ha_state
        self._initial_state = initial_state
@@ -602,12 +601,6 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced labels."""
        referenced = self.action_script.referenced_labels

        if self._condition is not None:
            for conf in self._condition.config:
                referenced |= condition_helper.async_extract_targets(
                    conf, ATTR_LABEL_ID
                )

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
        return referenced
@@ -617,12 +610,6 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced floors."""
        referenced = self.action_script.referenced_floors

        if self._condition is not None:
            for conf in self._condition.config:
                referenced |= condition_helper.async_extract_targets(
                    conf, ATTR_FLOOR_ID
                )

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
        return referenced
@@ -632,10 +619,6 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced areas."""
        referenced = self.action_script.referenced_areas

        if self._condition is not None:
            for conf in self._condition.config:
                referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)

        for conf in self._trigger_config:
            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
        return referenced
@@ -652,9 +635,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced devices."""
        referenced = self.action_script.referenced_devices

        if self._condition is not None:
            for conf in self._condition.config:
                referenced |= condition_helper.async_extract_devices(conf)
        if self._cond_func is not None:
            for conf in self._cond_func.config:
                referenced |= condition.async_extract_devices(conf)

        for conf in self._trigger_config:
            referenced |= set(_trigger_extract_devices(conf))
@@ -666,9 +649,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return a set of referenced entities."""
        referenced = self.action_script.referenced_entities

        if self._condition is not None:
            for conf in self._condition.config:
                referenced |= condition_helper.async_extract_entities(conf)
        if self._cond_func is not None:
            for conf in self._cond_func.config:
                referenced |= condition.async_extract_entities(conf)

        for conf in self._trigger_config:
            for entity_id in _trigger_extract_entities(conf):
@@ -788,8 +771,8 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):

        if (
            not skip_condition
            and self._condition is not None
            and not self._condition(variables)
            and self._cond_func is not None
            and not self._cond_func(variables)
        ):
            self._logger.debug(
                "Conditions not met, aborting automation. Condition summary: %s",
@@ -1051,12 +1034,12 @@ async def _create_automation_entities(
        )

        if CONF_CONDITIONS in config_block:
            condition = await _async_process_if(hass, name, config_block)
            cond_func = await _async_process_if(hass, name, config_block)

            if condition is None:
            if cond_func is None:
                continue
        else:
            condition = None
            cond_func = None

        # Add trigger variables to variables
        variables = None
@@ -1074,7 +1057,7 @@ async def _create_automation_entities(
            automation_id,
            name,
            config_block[CONF_TRIGGERS],
            condition,
            cond_func,
            action_script,
            initial_state,
            variables,
@@ -1216,7 +1199,7 @@ async def _async_process_if(
    if_configs = config[CONF_CONDITIONS]

    try:
        if_action = await condition_helper.async_conditions_from_config(
        if_action = await condition.async_conditions_from_config(
            hass, if_configs, LOGGER, name
        )
    except HomeAssistantError as ex:
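Beyond the `condition`/`cond_func` rename, all the `referenced_*` properties above share one accumulate-by-union shape. A plain-Python sketch of that shape (simplified dict configs, not Home Assistant's real config or `IfAction` types):

```python
# Accumulate referenced IDs from a list of configs by set union, as the
# referenced_labels/floors/areas properties do above.
from collections.abc import Callable, Iterable


def collect_referenced(
    configs: Iterable[dict],
    extract: Callable[[dict], Iterable[str]],
) -> set[str]:
    referenced: set[str] = set()
    for conf in configs:
        referenced |= set(extract(conf))
    return referenced


confs = [{"area_id": ["kitchen"]}, {"area_id": ["porch", "kitchen"]}]
print(collect_referenced(confs, lambda c: c.get("area_id", [])))
# {'kitchen', 'porch'}
```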
@@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [
    "tmp_backups/*.tar",
    "OZW_Log.txt",
    "tts/*",
    "frontend_development_artifacts/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==1.11.0"],
  "requirements": ["hass-nabucasa==1.10.0"],
  "single_config_entry": true
}
@@ -10,7 +10,7 @@ LOGGER = logging.getLogger(__package__)

DOMAIN = "deconz"

HASSIO_CONFIGURATION_URL = "homeassistant://app/core_deconz"
HASSIO_CONFIGURATION_URL = "homeassistant://hassio/ingress/core_deconz"

CONF_BRIDGE_ID = "bridgeid"
CONF_GROUP_ID_BASE = "group_id_base"
@@ -1034,7 +1034,7 @@ def _async_setup_device_registry(
        and dashboard.data
        and dashboard.data.get(device_info.name)
    ):
        configuration_url = f"homeassistant://app/{dashboard.addon_slug}"
        configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}"

    manufacturer = "espressif"
    if device_info.manufacturer:
@@ -14,7 +14,7 @@
          "name": "[%key:component::fan::common::condition_behavior_name%]"
        }
      },
      "name": "Fan is off"
      "name": "If a fan is off"
    },
    "is_on": {
      "description": "Tests if one or more fans are on.",
@@ -24,7 +24,7 @@
          "name": "[%key:component::fan::common::condition_behavior_name%]"
        }
      },
      "name": "Fan is on"
      "name": "If a fan is on"
    }
  },
  "device_automation": {
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.12"]
  "requirements": ["pyfirefly==0.1.11"]
}
@@ -52,6 +52,10 @@ CONF_EXTRA_MODULE_URL = "extra_module_url"
CONF_EXTRA_JS_URL_ES5 = "extra_js_url_es5"
CONF_FRONTEND_REPO = "development_repo"
CONF_JS_VERSION = "javascript_version"
CONF_DEVELOPMENT_PR = "development_pr"
CONF_GITHUB_TOKEN = "github_token"

PR_CACHE_DIR = "frontend_development_artifacts"

DEFAULT_THEME_COLOR = "#2980b9"

@@ -129,7 +133,9 @@ CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
                vol.Optional(CONF_FRONTEND_REPO): cv.path,
                vol.Optional(CONF_DEVELOPMENT_PR): cv.positive_int,
                vol.Optional(CONF_GITHUB_TOKEN): cv.string,
                vol.Optional(CONF_THEMES): vol.All(dict, _validate_themes),
                vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
                    cv.ensure_list, [cv.string]
@@ -394,7 +400,17 @@ def add_manifest_json_key(key: str, val: Any) -> None:
def _frontend_root(dev_repo_path: str | None) -> pathlib.Path:
    """Return root path to the frontend files."""
    if dev_repo_path is not None:
        return pathlib.Path(dev_repo_path) / "hass_frontend"
        dev_frontend_path = pathlib.Path(dev_repo_path) / "hass_frontend"
        if dev_frontend_path.exists() and dev_frontend_path.is_dir():
            _LOGGER.info("Using frontend development repo: %s", dev_repo_path)
            return dev_frontend_path

        _LOGGER.error(
            "Frontend development repo path does not exist: %s, "
            "falling back to the integrated frontend",
            dev_repo_path,
        )

    # Keep import here so that we can import frontend without installing reqs
    import hass_frontend  # noqa: PLC0415

@@ -421,7 +437,43 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            key,
        )

    # Handle development configuration with priority
    repo_path = conf.get(CONF_FRONTEND_REPO)
    dev_pr_number = conf.get(CONF_DEVELOPMENT_PR)

    # Priority: development_repo > development_pr > integrated
    if repo_path and dev_pr_number:
        _LOGGER.warning(
            "Both development_repo and development_pr are configured. "
            "Using development_repo (takes precedence). "
            "Remove development_repo to use automatic PR download"
        )
        dev_pr_number = None  # Disable PR download

    if dev_pr_number:
        pr_cache_dir = pathlib.Path(hass.config.config_dir) / PR_CACHE_DIR
        github_token = conf.get(CONF_GITHUB_TOKEN)

        # Keep import here so that we can import frontend without installing reqs
        from .pr_download import download_pr_artifact  # noqa: PLC0415

        # Download PR artifact
        dev_pr_dir = await download_pr_artifact(
            hass, dev_pr_number, github_token, pr_cache_dir
        )

        if dev_pr_dir is None:
            _LOGGER.error(
                "Failed to download PR #%s, falling back to the integrated frontend",
                dev_pr_number,
            )
            repo_path = None
        else:
            # frontend_dir is .../frontend_development_artifacts/<pr_number>/hass_frontend
            # We need to pass .../frontend_development_artifacts/<pr_number> to _frontend_root
            repo_path = str(dev_pr_dir.parent)
            _LOGGER.info("Using frontend from PR #%s", dev_pr_number)

    is_dev = repo_path is not None
    root_path = _frontend_root(repo_path)
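Putting the new options together, a `frontend:` block like the following would take the PR-download path in `async_setup` above. Shown as the dict that `CONFIG_SCHEMA` validates (the PR number and token are placeholders, not values from the branch):

```python
# Dict form of a hypothetical configuration.yaml `frontend:` block that
# CONFIG_SCHEMA above would accept.
FRONTEND_CONFIG = {
    "frontend": {
        # Download the built frontend for this home-assistant/frontend PR
        "development_pr": 12345,
        # Required for artifact downloads (placeholder token)
        "github_token": "ghp_xxx",
        # If development_repo were also set, it would take precedence and
        # the PR download would be skipped with a warning (see above).
    }
}
```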
@@ -23,5 +23,8 @@
    "winter_mode": {}
  },
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20260107.2"]
  "requirements": [
    "home-assistant-frontend==20260107.2",
    "aiogithubapi==24.6.0"
  ]
}
259 homeassistant/components/frontend/pr_download.py Normal file
@@ -0,0 +1,259 @@
"""GitHub PR artifact download functionality for frontend development."""

from __future__ import annotations

import io
import logging
import pathlib
import shutil
import zipfile

from aiogithubapi import (
    GitHubAPI,
    GitHubAuthenticationException,
    GitHubException,
    GitHubNotFoundException,
    GitHubPermissionException,
    GitHubRatelimitException,
)
from aiohttp import ClientError, ClientResponseError, ClientTimeout

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

_LOGGER = logging.getLogger(__name__)

GITHUB_REPO = "home-assistant/frontend"
ARTIFACT_NAME = "frontend-build"
CACHE_WARNING_SIZE_MB = 500

# Error messages
ERROR_INVALID_TOKEN = (
    "GitHub token is invalid or expired. "
    "Please check your github_token in the frontend configuration. "
    "Generate a new token at https://github.com/settings/tokens"
)
ERROR_RATE_LIMIT = (
    "GitHub API rate limit exceeded or token lacks permissions. "
    "Ensure your token has 'repo' or 'public_repo' scope"
)


def _get_directory_size_mb(directory: pathlib.Path) -> float:
    """Calculate total size of directory in MB (runs in executor)."""
    total = sum(f.stat().st_size for f in directory.rglob("*") if f.is_file())
    return total / (1024 * 1024)


async def _get_pr_head_sha(client: GitHubAPI, pr_number: int) -> str:
    """Get the head SHA for the PR."""
    try:
        response = await client.generic(
            endpoint=f"/repos/home-assistant/frontend/pulls/{pr_number}",
        )
        return str(response.data["head"]["sha"])
    except GitHubAuthenticationException as err:
        raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
    except (GitHubRatelimitException, GitHubPermissionException) as err:
        raise HomeAssistantError(ERROR_RATE_LIMIT) from err
    except GitHubNotFoundException as err:
        raise HomeAssistantError(
            f"PR #{pr_number} does not exist in repository {GITHUB_REPO}"
        ) from err
    except GitHubException as err:
        raise HomeAssistantError(f"GitHub API error: {err}") from err


async def _find_pr_artifact(client: GitHubAPI, pr_number: int, head_sha: str) -> str:
    """Find the build artifact for the given PR and commit SHA.

    Returns the artifact download URL.
    """
    try:
        # Get workflow runs for the commit
        response = await client.generic(
            endpoint="/repos/home-assistant/frontend/actions/workflows/ci.yaml/runs",
            params={"head_sha": head_sha, "per_page": 10},
        )

        # Find the most recent successful run for this commit
        for run in response.data.get("workflow_runs", []):
            if run["status"] == "completed" and run["conclusion"] == "success":
                # Get artifacts for this run
                artifacts_response = await client.generic(
                    endpoint=f"/repos/home-assistant/frontend/actions/runs/{run['id']}/artifacts",
                )

                # Find the frontend-build artifact
                for artifact in artifacts_response.data.get("artifacts", []):
                    if artifact["name"] == ARTIFACT_NAME:
                        _LOGGER.info(
                            "Found artifact '%s' from CI run #%s",
                            ARTIFACT_NAME,
                            run["id"],
                        )
                        return str(artifact["archive_download_url"])

        raise HomeAssistantError(
            f"No '{ARTIFACT_NAME}' artifact found for PR #{pr_number}. "
            "Possible reasons: CI has not run yet or is running, "
            "or the build failed, or the PR artifact expired. "
            f"Check https://github.com/{GITHUB_REPO}/pull/{pr_number}/checks"
        )
    except GitHubAuthenticationException as err:
        raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
    except (GitHubRatelimitException, GitHubPermissionException) as err:
        raise HomeAssistantError(ERROR_RATE_LIMIT) from err
    except GitHubException as err:
        raise HomeAssistantError(f"GitHub API error: {err}") from err


async def _download_artifact_data(
    hass: HomeAssistant, artifact_url: str, github_token: str
) -> bytes:
    """Download artifact data from GitHub."""
    session = async_get_clientsession(hass)
    headers = {
        "Authorization": f"token {github_token}",
        "Accept": "application/vnd.github+json",
    }

    try:
        response = await session.get(
            artifact_url, headers=headers, timeout=ClientTimeout(total=60)
        )
        response.raise_for_status()
        return await response.read()
    except ClientResponseError as err:
        if err.status == 401:
            raise HomeAssistantError(ERROR_INVALID_TOKEN) from err
        if err.status == 403:
            raise HomeAssistantError(ERROR_RATE_LIMIT) from err
        raise HomeAssistantError(
            f"Failed to download artifact: HTTP {err.status}"
        ) from err
    except TimeoutError as err:
        raise HomeAssistantError(
            "Timeout downloading artifact (>60s). Check your network connection"
        ) from err
    except ClientError as err:
        raise HomeAssistantError(f"Network error downloading artifact: {err}") from err


def _extract_artifact(
    artifact_data: bytes,
    pr_dir: pathlib.Path,
    frontend_dir: pathlib.Path,
    head_sha: str,
) -> None:
    """Extract artifact and save SHA (runs in executor)."""
    if pr_dir.exists():
        shutil.rmtree(pr_dir)
    frontend_dir.mkdir(parents=True, exist_ok=True)

    with zipfile.ZipFile(io.BytesIO(artifact_data)) as zip_file:
        zip_file.extractall(str(frontend_dir))

    # Save the commit SHA for cache validation
    sha_file = pr_dir / ".sha"
    sha_file.write_text(head_sha)


async def download_pr_artifact(
    hass: HomeAssistant,
    pr_number: int,
    github_token: str | None,
    cache_dir: pathlib.Path,
) -> pathlib.Path | None:
    """Download and extract frontend PR artifact from GitHub.

    Returns the path to the extracted hass_frontend directory, or None on failure.
    """
    # GitHub token is required to download artifacts
    if not github_token:
        _LOGGER.error(
            "GitHub token is required to download PR artifacts. "
            "Add 'github_token' to your frontend configuration"
        )
        return None

    # Create GitHub API client
    client = GitHubAPI(
        token=github_token,
        session=async_get_clientsession(hass),
    )

    # Get the current head SHA for this PR
    try:
        head_sha = await _get_pr_head_sha(client, pr_number)
    except HomeAssistantError as err:
        _LOGGER.error("%s", err)
        return None

    # Check if we have this exact version cached
    pr_dir = cache_dir / str(pr_number)
    frontend_dir = pr_dir / "hass_frontend"
    sha_file = pr_dir / ".sha"

    # Check if cached version matches current commit
    if frontend_dir.exists() and sha_file.exists():
        cached_sha = await hass.async_add_executor_job(sha_file.read_text)
        if cached_sha.strip() == head_sha:
            _LOGGER.info(
                "Using cached PR #%s (commit %s) from %s",
                pr_number,
                head_sha[:8],
                pr_dir,
            )
            return frontend_dir
        _LOGGER.info(
            "PR #%s has new commits (cached: %s, current: %s), re-downloading",
            pr_number,
            cached_sha[:8],
            head_sha[:8],
        )

    try:
        # Find the artifact
        artifact_url = await _find_pr_artifact(client, pr_number, head_sha)

        # Download artifact
        _LOGGER.info("Downloading frontend PR #%s artifact", pr_number)
        artifact_data = await _download_artifact_data(hass, artifact_url, github_token)

        # Extract artifact
        await hass.async_add_executor_job(
            _extract_artifact, artifact_data, pr_dir, frontend_dir, head_sha
        )

        _LOGGER.info(
            "Successfully downloaded and extracted PR #%s (commit %s) to %s",
            pr_number,
            head_sha[:8],
            pr_dir,
        )

        size_mb = await hass.async_add_executor_job(_get_directory_size_mb, pr_dir)
        _LOGGER.info("PR #%s cache size: %.1f MB", pr_number, size_mb)

        # Warn if total cache size exceeds threshold
        total_cache_size = await hass.async_add_executor_job(
            _get_directory_size_mb, cache_dir
        )
        if total_cache_size > CACHE_WARNING_SIZE_MB:
            _LOGGER.warning(
                "Frontend PR cache directory has grown to %.1f MB (threshold: %d MB). "
                "Consider manually cleaning up old PR caches in %s",
                total_cache_size,
                CACHE_WARNING_SIZE_MB,
                cache_dir,
            )
    except HomeAssistantError as err:
        _LOGGER.error("%s", err)
        return None
    except Exception:
        _LOGGER.exception("Unexpected error downloading PR #%s", pr_number)
        return None
    else:
        return frontend_dir
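For orientation, this is roughly how the `async_setup` hunk earlier consumes this module: a condensed sketch of that call path, not additional code from the branch.

```python
import pathlib

from homeassistant.components.frontend.pr_download import download_pr_artifact


async def resolve_repo_path(hass, pr_number: int, token: str | None) -> str | None:
    """Return the value to pass to _frontend_root, or None for the bundled build."""
    cache_dir = pathlib.Path(hass.config.config_dir) / "frontend_development_artifacts"
    frontend_dir = await download_pr_artifact(hass, pr_number, token, cache_dir)
    if frontend_dir is None:
        return None  # fall back to the integrated frontend
    # download_pr_artifact returns .../<pr_number>/hass_frontend;
    # _frontend_root expects the parent directory.
    return str(frontend_dir.parent)
```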
@@ -7,11 +7,20 @@
      "benzene": {
        "default": "mdi:molecule"
      },
      "nitrogen_dioxide": {
        "default": "mdi:molecule"
      },
      "nitrogen_monoxide": {
        "default": "mdi:molecule"
      },
      "non_methane_hydrocarbons": {
        "default": "mdi:molecule"
      },
      "ozone": {
        "default": "mdi:molecule"
      },
      "sulphur_dioxide": {
        "default": "mdi:molecule"
      }
    }
  }
}
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["google_air_quality_api"],
  "quality_scale": "bronze",
  "requirements": ["google_air_quality_api==3.0.0"]
  "requirements": ["google_air_quality_api==2.1.2"]
}
@@ -13,11 +13,7 @@ from homeassistant.components.sensor import (
    SensorStateClass,
)
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import (
    CONCENTRATION_PARTS_PER_MILLION,
    CONF_LATITUDE,
    CONF_LONGITUDE,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -118,7 +114,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
        native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
        exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.co.concentration.value,
        suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
    ),
    AirQualitySensorEntityDescription(
        key="nh3",
@@ -146,16 +141,16 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
    ),
    AirQualitySensorEntityDescription(
        key="no2",
        translation_key="nitrogen_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.no2.concentration.units,
        exists_fn=lambda x: "no2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.no2.concentration.value,
    ),
    AirQualitySensorEntityDescription(
        key="o3",
        translation_key="ozone",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.OZONE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
        exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.o3.concentration.value,
@@ -178,8 +173,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
    ),
    AirQualitySensorEntityDescription(
        key="so2",
        translation_key="sulphur_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.SULPHUR_DIOXIDE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.so2.concentration.units,
        exists_fn=lambda x: "so2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.so2.concentration.value,
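Each sensor description above bundles lambdas that probe the API payload: `exists_fn` decides whether to create the entity, `value_fn` reads the measurement. A plain-Python sketch of that pattern with simplified types (dicts instead of the real API models and Home Assistant dataclasses):

```python
# Simplified stand-in for the exists_fn/value_fn description pattern above.
from dataclasses import dataclass
from typing import Any, Callable


@dataclass(frozen=True)
class Description:
    key: str
    exists_fn: Callable[[Any], bool]
    value_fn: Callable[[Any], float]


data = {"pollutants": {"o3": {"value": 31.2}}}
OZONE = Description(
    key="o3",
    exists_fn=lambda x: "o3" in x["pollutants"],
    value_fn=lambda x: x["pollutants"]["o3"]["value"],
)
if OZONE.exists_fn(data):  # only create the entity if the API reports o3
    print(OZONE.value_fn(data))  # 31.2
```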
@@ -205,12 +205,21 @@
        "so2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
      }
    },
    "nitrogen_dioxide": {
      "name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
    },
    "nitrogen_monoxide": {
      "name": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]"
    },
    "non_methane_hydrocarbons": {
      "name": "Non-methane hydrocarbons"
    },
    "ozone": {
      "name": "[%key:component::sensor::entity_component::ozone::name%]"
    },
    "sulphur_dioxide": {
      "name": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
    },
    "uaqi": {
      "name": "Universal Air Quality Index"
    },
@@ -83,9 +83,6 @@
        "invalid_credentials": "Input is incomplete. You must provide either your login details or an API token",
        "unknown": "[%key:common::config_flow::error::unknown%]"
      },
      "initiate_flow": {
        "user": "[%key:common::config_flow::initiate_flow::account%]"
      },
      "step": {
        "advanced": {
          "data": {
@@ -6,7 +6,7 @@ from typing import Any

from aiohttp import web

from homeassistant.components import frontend
from homeassistant.components import frontend, panel_custom
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import ATTR_ICON
from homeassistant.core import HomeAssistant
@@ -33,7 +33,7 @@ async def async_setup_addon_panel(hass: HomeAssistant, hassio: HassIO) -> None:
        # _register_panel never suspends and is only
        # a coroutine because it would be a breaking change
        # to make it a normal function
        _register_panel(hass, addon, data)
        await _register_panel(hass, addon, data)


class HassIOAddonPanel(HomeAssistantView):
@@ -58,7 +58,7 @@ class HassIOAddonPanel(HomeAssistantView):
        data = panels[addon]

        # Register panel
        _register_panel(self.hass, addon, data)
        await _register_panel(self.hass, addon, data)
        return web.Response()

    async def delete(self, request: web.Request, addon: str) -> web.Response:
@@ -76,14 +76,18 @@ class HassIOAddonPanel(HomeAssistantView):
        return {}


def _register_panel(hass: HomeAssistant, addon: str, data: dict[str, Any]):
async def _register_panel(
    hass: HomeAssistant, addon: str, data: dict[str, Any]
) -> None:
    """Init coroutine to register the panel."""
    frontend.async_register_built_in_panel(
    await panel_custom.async_register_panel(
        hass,
        "app",
        frontend_url_path=addon,
        webcomponent_name="hassio-main",
        sidebar_title=data[ATTR_TITLE],
        sidebar_icon=data[ATTR_ICON],
        js_url="/api/hassio/app/entrypoint.js",
        embed_iframe=True,
        require_admin=data[ATTR_ADMIN],
        config={"addon": addon},
        config={"ingress": addon},
    )
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "requirements": ["hdfury==1.4.2"]
  "requirements": ["hdfury==1.3.1"]
}
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "platinum",
  "requirements": ["imgw_pib==2.0.1"]
  "requirements": ["imgw_pib==1.6.0"]
}
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["kostal"],
  "requirements": ["pykoplenti==1.5.0"]
  "requirements": ["pykoplenti==1.3.0"]
}
@@ -49,7 +49,7 @@
          "name": "[%key:component::light::common::condition_behavior_name%]"
        }
      },
      "name": "Light is off"
      "name": "If a light is off"
    },
    "is_on": {
      "description": "Tests if one or more lights are on.",
@@ -59,7 +59,7 @@
          "name": "[%key:component::light::common::condition_behavior_name%]"
        }
      },
      "name": "Light is on"
      "name": "If a light is on"
    }
  },
  "device_automation": {
@@ -1,47 +1,24 @@
"""Provides triggers for lights."""

from typing import Any

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
    EntityNumericalStateAttributeChangedTriggerBase,
    EntityNumericalStateAttributeCrossedThresholdTriggerBase,
    Trigger,
    make_entity_numerical_state_attribute_changed_trigger,
    make_entity_numerical_state_attribute_crossed_threshold_trigger,
    make_entity_target_state_trigger,
)

from . import ATTR_BRIGHTNESS
from .const import DOMAIN


def _convert_uint8_to_percentage(value: Any) -> float:
    """Convert a uint8 value (0-255) to a percentage (0-100)."""
    return (float(value) / 255.0) * 100.0


class BrightnessChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
    """Trigger for brightness changed."""

    _domain = DOMAIN
    _attribute = ATTR_BRIGHTNESS

    _converter = staticmethod(_convert_uint8_to_percentage)


class BrightnessCrossedThresholdTrigger(
    EntityNumericalStateAttributeCrossedThresholdTriggerBase
):
    """Trigger for brightness crossed threshold."""

    _domain = DOMAIN
    _attribute = ATTR_BRIGHTNESS
    _converter = staticmethod(_convert_uint8_to_percentage)


TRIGGERS: dict[str, type[Trigger]] = {
    "brightness_changed": BrightnessChangedTrigger,
    "brightness_crossed_threshold": BrightnessCrossedThresholdTrigger,
    "brightness_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_BRIGHTNESS
    ),
    "brightness_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_BRIGHTNESS
    ),
    "turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
    "turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
}
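The two sides of this hunk contrast hand-written trigger classes with factory helpers that build them from parameters. A generic sketch of that factory idea in plain Python (the real helpers live in `homeassistant.helpers.trigger`; this stand-in only illustrates the shape):

```python
# Build a parameterized trigger class once instead of writing one per attribute.
def make_attribute_changed_trigger(domain: str, attribute: str) -> type:
    class AttributeChangedTrigger:
        _domain = domain
        _attribute = attribute

        @staticmethod
        def convert(value: float) -> float:
            # e.g. uint8 brightness (0-255) to percent, as in the old class
            return value / 255.0 * 100.0

    AttributeChangedTrigger.__name__ = f"{domain}_{attribute}_changed_trigger"
    return AttributeChangedTrigger


trigger_cls = make_attribute_changed_trigger("light", "brightness")
print(trigger_cls._attribute, trigger_cls.convert(255))  # brightness 100.0
```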
@@ -22,10 +22,7 @@
    number:
      selector:
        number:
          max: 100
          min: 0
          mode: box
          unit_of_measurement: "%"
    entity:
      selector:
        entity:
@@ -50,7 +50,7 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
        """Check connection to the Mealie API."""
        assert self.host is not None

        if "/app/" in self.host:
        if "/hassio/ingress/" in self.host:
            return {"base": "ingress_url"}, None

        client = MealieClient(
@@ -73,6 +73,15 @@ SHARED_OPTIONS = [
    CONF_STATE_TOPIC,
]

MQTT_ORIGIN_INFO_SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Required(CONF_NAME): cv.string,
            vol.Optional(CONF_SW_VERSION): cv.string,
            vol.Optional(CONF_SUPPORT_URL): cv.configuration_url,
        }
    ),
)

_MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema(
    {
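The origin-info schema follows the usual voluptuous pattern of required and optional string keys. A tiny standalone demonstration using plain `str` validators instead of Home Assistant's `cv` helpers:

```python
# Minimal voluptuous schema in the same shape as MQTT_ORIGIN_INFO_SCHEMA above.
import voluptuous as vol

ORIGIN_SCHEMA = vol.Schema(
    {
        vol.Required("name"): str,
        vol.Optional("sw_version"): str,
        vol.Optional("support_url"): str,
    }
)

# Valid input passes through; a missing "name" would raise vol.Invalid.
print(ORIGIN_SCHEMA({"name": "my-bridge", "sw_version": "1.2.3"}))
```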
@@ -8,5 +8,5 @@
  "iot_class": "cloud_push",
  "loggers": ["aionfty"],
  "quality_scale": "platinum",
  "requirements": ["aiontfy==0.7.0"]
  "requirements": ["aiontfy==0.6.1"]
}
@@ -43,7 +43,6 @@ ATTR_ICON = "icon"
ATTR_MARKDOWN = "markdown"
ATTR_PRIORITY = "priority"
ATTR_TAGS = "tags"
ATTR_SEQUENCE_ID = "sequence_id"

SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
    {
@@ -61,7 +60,6 @@ SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
        vol.Optional(ATTR_EMAIL): vol.Email(),
        vol.Optional(ATTR_CALL): cv.string,
        vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
        vol.Optional(ATTR_SEQUENCE_ID): cv.string,
    }
)
@@ -88,8 +88,3 @@ publish:
        type: url
        autocomplete: url
    example: https://example.org/logo.png
  sequence_id:
    required: false
    selector:
      text:
    example: "Mc3otamDNcpJ"
@@ -1,7 +1,6 @@
{
  "common": {
    "add_topic_description": "Set up a topic for notifications.",
    "sequence_id": "Sequence ID",
    "topic": "Topic"
  },
  "config": {
@@ -172,9 +171,6 @@
        "icon": { "name": "Icon" },
        "message": { "name": "Message" },
        "priority": { "name": "Priority" },
        "sequence_id": {
          "name": "[%key:component::ntfy::common::sequence_id%]"
        },
        "tags": { "name": "Tags" },
        "time": { "name": "Time" },
        "title": { "name": "Title" },
@@ -360,10 +356,6 @@
          "description": "All messages have a priority that defines how urgently your phone notifies you, depending on the configured vibration patterns, notification sounds, and visibility in the notification drawer or pop-over.",
          "name": "Message priority"
        },
        "sequence_id": {
          "description": "Enter a message or sequence ID to update an existing notification, or specify a sequence ID to reference later when updating, clearing (mark as read and dismiss), or deleting a notification.",
          "name": "[%key:component::ntfy::common::sequence_id%]"
        },
        "tags": {
          "description": "Add tags or emojis to the notification. Emojis (using shortcodes like smile) will appear in the notification title or message. Other tags will be displayed below the notification content.",
          "name": "Tags/Emojis"
@@ -125,7 +125,7 @@ class NumberDeviceClass(StrEnum):
    CO = "carbon_monoxide"
    """Carbon Monoxide gas concentration.

    Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `mg/m³`, `μg/m³`
    Unit of measurement: `ppm` (parts per million), `mg/m³`, `μg/m³`
    """

    CO2 = "carbon_dioxide"
@@ -247,7 +247,7 @@ class NumberDeviceClass(StrEnum):
    NITROGEN_DIOXIDE = "nitrogen_dioxide"
    """Amount of NO2.

    Unit of measurement: `ppb` (parts per billion), `μg/m³`
    Unit of measurement: `μg/m³`
    """

    NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -265,7 +265,7 @@ class NumberDeviceClass(StrEnum):
    OZONE = "ozone"
    """Amount of O3.

    Unit of measurement: `ppb` (parts per billion), `μg/m³`
    Unit of measurement: `μg/m³`
    """

    PH = "ph"
@@ -483,7 +483,6 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
    NumberDeviceClass.BATTERY: {PERCENTAGE},
    NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
    NumberDeviceClass.CO: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_PARTS_PER_MILLION,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
@@ -517,16 +516,10 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
    NumberDeviceClass.ILLUMINANCE: {LIGHT_LUX},
    NumberDeviceClass.IRRADIANCE: set(UnitOfIrradiance),
    NumberDeviceClass.MOISTURE: {PERCENTAGE},
    NumberDeviceClass.NITROGEN_DIOXIDE: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    },
    NumberDeviceClass.NITROGEN_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    NumberDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    NumberDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    NumberDeviceClass.OZONE: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    },
    NumberDeviceClass.OZONE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    NumberDeviceClass.PH: {None},
    NumberDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "[%key:common::config_flow::initiate_flow::account%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
|
||||
@@ -178,7 +178,6 @@ class OneDriveBackupAgent(BackupAgent):
                file,
                upload_chunk_size=upload_chunk_size,
                session=async_get_clientsession(self._hass),
                smart_chunk_size=True,
            )
        except HashMismatchError as err:
            raise BackupAgentError(
@@ -10,5 +10,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["onedrive_personal_sdk"],
  "quality_scale": "platinum",
  "requirements": ["onedrive-personal-sdk==0.1.1"]
  "requirements": ["onedrive-personal-sdk==0.1.0"]
}
@@ -25,9 +25,6 @@
        "folder_creation_error": "Failed to create folder",
        "folder_rename_error": "Failed to rename folder"
      },
      "initiate_flow": {
        "user": "[%key:common::config_flow::initiate_flow::account%]"
      },
      "step": {
        "folder_name": {
          "data": {
@@ -13,9 +13,6 @@
        "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
        "unknown": "[%key:common::config_flow::error::unknown%]"
      },
      "initiate_flow": {
        "user": "[%key:common::config_flow::initiate_flow::account%]"
      },
      "step": {
        "reauth_confirm": {
          "data": {
@@ -9,7 +9,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/qnap_qsw",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["aioqsw"],
  "requirements": ["aioqsw==0.4.2"]
@@ -5,7 +5,6 @@
  "codeowners": ["@rabbit-air"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rabbitair",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["python-rabbitair==0.0.8"],
  "zeroconf": ["_rabbitair._udp.local."]
@@ -13,7 +13,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/radiotherm",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["radiotherm"],
  "requirements": ["radiotherm==2.1.0"]
@@ -4,7 +4,6 @@
  "codeowners": ["@konikvranik", "@allenporter"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rainbird",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["pyrainbird"],
  "requirements": ["pyrainbird==6.0.1"]
@@ -5,7 +5,6 @@
  "config_flow": true,
  "dependencies": ["usb"],
  "documentation": "https://www.home-assistant.io/integrations/rainforest_raven",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "requirements": ["aioraven==0.7.1"],
  "usb": [
@@ -15,7 +15,6 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/rapt_ble",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["rapt-ble==0.1.2"]
}
@@ -7,9 +7,6 @@
        "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
        "unknown_license_plate": "Unknown license plate"
      },
      "initiate_flow": {
        "user": "Add vehicle"
      },
      "step": {
        "user": {
          "data": {
@@ -59,8 +59,6 @@ from homeassistant.util.unit_conversion import (
    InformationConverter,
    MassConverter,
    MassVolumeConcentrationConverter,
    NitrogenDioxideConcentrationConverter,
    OzoneConcentrationConverter,
    PowerConverter,
    PressureConverter,
    ReactiveEnergyConverter,
@@ -227,10 +225,8 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [

_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
    CarbonMonoxideConcentrationConverter,
    NitrogenDioxideConcentrationConverter,
    OzoneConcentrationConverter,
    SulphurDioxideConcentrationConverter,
    TemperatureDeltaConverter,
    SulphurDioxideConcentrationConverter,
]

STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
@@ -33,8 +33,6 @@ from homeassistant.util.unit_conversion import (
    InformationConverter,
    MassConverter,
    MassVolumeConcentrationConverter,
    NitrogenDioxideConcentrationConverter,
    OzoneConcentrationConverter,
    PowerConverter,
    PressureConverter,
    ReactiveEnergyConverter,
@@ -87,14 +85,11 @@ UNIT_SCHEMA = vol.Schema(
        vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS),
        vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS),
        vol.Optional("electric_current"): vol.In(ElectricCurrentConverter.VALID_UNITS),
        vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
        vol.Optional("energy"): vol.In(EnergyConverter.VALID_UNITS),
        vol.Optional("energy_distance"): vol.In(EnergyDistanceConverter.VALID_UNITS),
        vol.Optional("information"): vol.In(InformationConverter.VALID_UNITS),
        vol.Optional("mass"): vol.In(MassConverter.VALID_UNITS),
        vol.Optional("nitrogen_dioxide"): vol.In(
            NitrogenDioxideConcentrationConverter.VALID_UNITS
        ),
        vol.Optional("ozone"): vol.In(OzoneConcentrationConverter.VALID_UNITS),
        vol.Optional("power"): vol.In(PowerConverter.VALID_UNITS),
        vol.Optional("pressure"): vol.In(PressureConverter.VALID_UNITS),
        vol.Optional("reactive_energy"): vol.In(ReactiveEnergyConverter.VALID_UNITS),
@@ -108,7 +103,6 @@ UNIT_SCHEMA = vol.Schema(
            TemperatureDeltaConverter.VALID_UNITS
        ),
        vol.Optional("unitless"): vol.In(UnitlessRatioConverter.VALID_UNITS),
        vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
        vol.Optional("volume"): vol.In(VolumeConverter.VALID_UNITS),
        vol.Optional("volume_flow_rate"): vol.In(VolumeFlowRateConverter.VALID_UNITS),
    }
@@ -4,7 +4,6 @@
  "codeowners": ["@ashionky"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/refoss",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "requirements": ["refoss-ha==1.2.5"],
  "single_config_entry": true
@@ -10,7 +10,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/rehlko",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["aiokem"],
  "quality_scale": "silver",
@@ -12,9 +12,6 @@
        "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]",
        "unknown": "[%key:common::config_flow::error::unknown%]"
      },
      "initiate_flow": {
        "user": "[%key:common::config_flow::initiate_flow::account%]"
      },
      "step": {
        "kamereon": {
          "data": {
@@ -4,7 +4,6 @@
  "codeowners": ["@jimmyd-be"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/renson",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["renson-endura-delta==1.7.2"]
}
@@ -4,7 +4,6 @@
  "codeowners": ["@danielhiversen", "@elupus", "@RobBie1221"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rfxtrx",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["RFXtrx"],
  "requirements": ["pyRFXtrx==0.31.1"]
@@ -4,7 +4,6 @@
  "codeowners": ["@milanmeu", "@frenck", "@quebulm"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["pyrituals"],
  "requirements": ["pyrituals==0.0.7"]
@@ -4,7 +4,6 @@
  "codeowners": ["@xeniter"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/romy",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["romy==0.0.10"],
  "zeroconf": ["_aicu-http._tcp.local."]
@@ -22,7 +22,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/roomba",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["paho_mqtt", "roombapy"],
  "requirements": ["roombapy==1.9.0"],
@@ -4,7 +4,6 @@
  "codeowners": ["@pavoni"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/roon",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["roonapi"],
  "requirements": ["roonapi==0.1.6"]
@@ -4,7 +4,6 @@
  "codeowners": [],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/rova",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["rova"],
  "requirements": ["rova==0.4.1"]
@@ -4,7 +4,6 @@
  "codeowners": ["@noahhusby"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/russound_rio",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["aiorussound"],
  "quality_scale": "silver",
@@ -10,7 +10,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/ruuvi_gateway",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["aioruuvigateway==0.1.0"]
}
@@ -15,7 +15,6 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/ruuvitag_ble",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["ruuvitag-ble==0.4.0"]
}
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@OnFreund", "@elad-bar", "@maorcc"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/rympro",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyrympro==0.0.9"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@shaiu", "@jpbede"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/sabnzbd",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pysabnzbd"],
|
||||
"quality_scale": "bronze",
|
||||
|
||||
@@ -44,6 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: LeilSaunaConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: LeilSaunaConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        await entry.runtime_data.client.async_close()
        entry.runtime_data.client.close()

    return unload_ok

@@ -51,7 +51,7 @@ async def validate_input(data: dict[str, Any]) -> None:
        # Try to read data to verify communication
        await client.async_get_data()
    finally:
        await client.async_close()
        client.close()


class LeilSaunaConfigFlow(ConfigFlow, domain=DOMAIN):

@@ -8,5 +8,5 @@
  "iot_class": "local_polling",
  "loggers": ["pysaunum"],
  "quality_scale": "platinum",
  "requirements": ["pysaunum==0.3.0"]
  "requirements": ["pysaunum==0.2.0"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@dknowles2"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/schlage",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["pyschlage==2025.9.0"]
}

@@ -18,7 +18,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/sense",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["sense_energy"],
  "requirements": ["sense-energy==0.13.8"]

@@ -12,7 +12,6 @@
  "homekit": {
    "models": ["Sensibo"]
  },
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["pysensibo"],
  "quality_scale": "platinum",

@@ -63,8 +63,6 @@ from homeassistant.util.unit_conversion import (
    InformationConverter,
    MassConverter,
    MassVolumeConcentrationConverter,
    NitrogenDioxideConcentrationConverter,
    OzoneConcentrationConverter,
    PowerConverter,
    PressureConverter,
    ReactiveEnergyConverter,
@@ -161,7 +159,7 @@ class SensorDeviceClass(StrEnum):
    CO = "carbon_monoxide"
    """Carbon Monoxide gas concentration.

    Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `mg/m³`, `μg/m³`
    Unit of measurement: `ppm` (parts per million), `mg/m³`, `μg/m³`
    """

    CO2 = "carbon_dioxide"
@@ -285,7 +283,7 @@ class SensorDeviceClass(StrEnum):
    NITROGEN_DIOXIDE = "nitrogen_dioxide"
    """Amount of NO2.

    Unit of measurement: `ppb` (parts per billion), `μg/m³`
    Unit of measurement: `μg/m³`
    """

    NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -303,7 +301,7 @@ class SensorDeviceClass(StrEnum):
    OZONE = "ozone"
    """Amount of O3.

    Unit of measurement: `ppb` (parts per billion), `μg/m³`
    Unit of measurement: `μg/m³`
    """

    PH = "ph"
@@ -565,8 +563,6 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
    SensorDeviceClass.ENERGY_DISTANCE: EnergyDistanceConverter,
    SensorDeviceClass.ENERGY_STORAGE: EnergyConverter,
    SensorDeviceClass.GAS: VolumeConverter,
    SensorDeviceClass.NITROGEN_DIOXIDE: NitrogenDioxideConcentrationConverter,
    SensorDeviceClass.OZONE: OzoneConcentrationConverter,
    SensorDeviceClass.POWER: PowerConverter,
    SensorDeviceClass.POWER_FACTOR: UnitlessRatioConverter,
    SensorDeviceClass.PRECIPITATION: DistanceConverter,
@@ -601,7 +597,6 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
    SensorDeviceClass.BATTERY: {PERCENTAGE},
    SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
    SensorDeviceClass.CO: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_PARTS_PER_MILLION,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
@@ -635,16 +630,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
    SensorDeviceClass.ILLUMINANCE: {LIGHT_LUX},
    SensorDeviceClass.IRRADIANCE: set(UnitOfIrradiance),
    SensorDeviceClass.MOISTURE: {PERCENTAGE},
    SensorDeviceClass.NITROGEN_DIOXIDE: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    },
    SensorDeviceClass.NITROGEN_DIOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    SensorDeviceClass.NITROGEN_MONOXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    SensorDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    SensorDeviceClass.OZONE: {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    },
    SensorDeviceClass.OZONE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    SensorDeviceClass.PH: {None},
    SensorDeviceClass.PM1: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
    SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
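Editor's note on the two converter removals above: ppb ↔ µg/m³ is not a unit-only conversion, it depends on the molar mass of the specific gas, which is why NO2 and O3 each carried a dedicated concentration converter. A minimal Python sketch of the relationship (assuming 25 °C / 1 atm, molar volume ≈ 24.45 L/mol; the names and constants are illustrative, not Home Assistant's converter API):

# Illustrative only: why ppb -> µg/m³ needs a gas-specific converter.
# Assumes 25 °C / 1 atm; molar masses in g/mol.
MOLAR_MASS = {"NO2": 46.0055, "O3": 47.9982}
MOLAR_VOLUME_L_PER_MOL = 24.45

def ppb_to_micrograms_per_m3(value_ppb: float, gas: str) -> float:
    """Convert a ppb reading to µg/m³ for a known gas."""
    return value_ppb * MOLAR_MASS[gas] / MOLAR_VOLUME_L_PER_MOL

print(round(ppb_to_micrograms_per_m3(100, "NO2"), 1))  # 188.2

Dropping ppb from DEVICE_CLASS_UNITS therefore also removes the need for these gas-specific converters.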
@@ -1,17 +0,0 @@
"""Provides conditions for sirens."""

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition

from . import DOMAIN

CONDITIONS: dict[str, type[Condition]] = {
    "is_off": make_entity_state_condition(DOMAIN, STATE_OFF),
    "is_on": make_entity_state_condition(DOMAIN, STATE_ON),
}


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
    """Return the siren conditions."""
    return CONDITIONS
@@ -1,17 +0,0 @@
.condition_common: &condition_common
  target:
    entity:
      domain: siren
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: condition_behavior
          options:
            - all
            - any

is_off: *condition_common
is_on: *condition_common
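Editor's note: the `behavior` field above selects how multiple targeted sirens are aggregated when the condition is evaluated. A self-contained sketch of the all/any semantics (assumed logic, not the actual condition helper's implementation):

# Sketch of the all/any semantics behind the `behavior` field (assumed logic).
def condition_matches(states: dict[str, str], wanted: str, behavior: str = "any") -> bool:
    """True if the targeted entities match `wanted` according to `behavior`."""
    combine = all if behavior == "all" else any
    return combine(state == wanted for state in states.values())

states = {"siren.porch": "on", "siren.garage": "off"}
print(condition_matches(states, "on"))         # True  (any siren is on)
print(condition_matches(states, "on", "all"))  # False (not all sirens are on)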
@@ -1,12 +1,4 @@
{
  "conditions": {
    "is_off": {
      "condition": "mdi:bullhorn-outline"
    },
    "is_on": {
      "condition": "mdi:bullhorn"
    }
  },
  "entity_component": {
    "_": {
      "default": "mdi:bullhorn"

@@ -1,32 +1,8 @@
{
  "common": {
    "condition_behavior_description": "How the state should match on the targeted sirens.",
    "condition_behavior_name": "Behavior",
    "trigger_behavior_description": "The behavior of the targeted sirens to trigger on.",
    "trigger_behavior_name": "Behavior"
  },
  "conditions": {
    "is_off": {
      "description": "Tests if one or more sirens are off.",
      "fields": {
        "behavior": {
          "description": "[%key:component::siren::common::condition_behavior_description%]",
          "name": "[%key:component::siren::common::condition_behavior_name%]"
        }
      },
      "name": "If a siren is off"
    },
    "is_on": {
      "description": "Tests if one or more sirens are on.",
      "fields": {
        "behavior": {
          "description": "[%key:component::siren::common::condition_behavior_description%]",
          "name": "[%key:component::siren::common::condition_behavior_name%]"
        }
      },
      "name": "If a siren is on"
    }
  },
  "entity_component": {
    "_": {
      "name": "[%key:component::siren::title%]",
@@ -42,12 +18,6 @@
    }
  },
  "selector": {
    "condition_behavior": {
      "options": {
        "all": "All",
        "any": "Any"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",

@@ -18,9 +18,6 @@
    "error": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
    },
    "initiate_flow": {
      "user": "[%key:common::config_flow::initiate_flow::account%]"
    },
    "step": {
      "pick_implementation": {
        "data": {

@@ -16,9 +16,6 @@
    "create_entry": {
      "default": "Successfully authenticated with Spotify."
    },
    "initiate_flow": {
      "user": "[%key:common::config_flow::initiate_flow::account%]"
    },
    "step": {
      "pick_implementation": {
        "data": {

@@ -185,9 +185,6 @@ async def make_device_data(
        "Smart Lock Lite",
        "Smart Lock Pro",
        "Smart Lock Ultra",
        "Smart Lock Vision",
        "Smart Lock Vision Pro",
        "Smart Lock Pro Wifi",
    ]:
        coordinator = await coordinator_for_device(
            hass, entry, api, device, coordinators_by_id

@@ -92,18 +92,6 @@ BINARY_SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
        CALIBRATION_DESCRIPTION,
        DOOR_OPEN_DESCRIPTION,
    ),
    "Smart Lock Vision": (
        CALIBRATION_DESCRIPTION,
        DOOR_OPEN_DESCRIPTION,
    ),
    "Smart Lock Vision Pro": (
        CALIBRATION_DESCRIPTION,
        DOOR_OPEN_DESCRIPTION,
    ),
    "Smart Lock Pro Wifi": (
        CALIBRATION_DESCRIPTION,
        DOOR_OPEN_DESCRIPTION,
    ),
    "Curtain": (CALIBRATION_DESCRIPTION,),
    "Curtain3": (CALIBRATION_DESCRIPTION,),
    "Roller Shade": (CALIBRATION_DESCRIPTION,),

@@ -46,7 +46,7 @@ class SwitchBotCloudLock(SwitchBotCloudEntity, LockEntity):
        """Set attributes from coordinator data."""
        if coord_data := self.coordinator.data:
            self._attr_is_locked = coord_data["lockState"] == "locked"
            if self.__model != "Smart Lock Lite":
            if self.__model in LockV2Commands.get_supported_devices():
                self._attr_supported_features = LockEntityFeature.OPEN

    async def async_lock(self, **kwargs: Any) -> None:
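Editor's note: the replacement line gates LockEntityFeature.OPEN on LockV2Commands.get_supported_devices() rather than a hard-coded model comparison. Only that name and call appear in the diff; a hypothetical sketch of what such a registry could look like:

# Assumed shape of a registry like LockV2Commands (body is hypothetical).
class LockV2Commands:
    _SUPPORTED: frozenset[str] = frozenset(
        {"Smart Lock Pro", "Smart Lock Ultra"}  # placeholder models, not the real list
    )

    @classmethod
    def get_supported_devices(cls) -> frozenset[str]:
        """Return the models that accept V2 commands such as open."""
        return cls._SUPPORTED

print("Smart Lock Ultra" in LockV2Commands.get_supported_devices())  # True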
@@ -225,9 +225,6 @@ SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
    "Smart Lock Lite": (BATTERY_DESCRIPTION,),
    "Smart Lock Pro": (BATTERY_DESCRIPTION,),
    "Smart Lock Ultra": (BATTERY_DESCRIPTION,),
    "Smart Lock Vision": (BATTERY_DESCRIPTION,),
    "Smart Lock Vision Pro": (BATTERY_DESCRIPTION,),
    "Smart Lock Pro Wifi": (BATTERY_DESCRIPTION,),
    "Relay Switch 2PM": (
        RELAY_SWITCH_2PM_POWER_DESCRIPTION,
        RELAY_SWITCH_2PM_VOLTAGE_DESCRIPTION,

@@ -49,8 +49,8 @@ DEFAULT_NAME = "Template Select"

SELECT_COMMON_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_OPTIONS): cv.template,
        vol.Required(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
        vol.Optional(ATTR_OPTIONS): cv.template,
        vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
        vol.Optional(CONF_STATE): cv.template,
    }
)
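Editor's note: flipping these keys between vol.Required and vol.Optional decides whether a config that omits them still validates. A standalone voluptuous sketch of the difference (plain str stands in for the cv.template / cv.SCRIPT_SCHEMA validators):

import voluptuous as vol

# Simplified stand-ins for the cv.* validators used above.
required = vol.Schema({vol.Required("options"): str, vol.Optional("state"): str})
optional = vol.Schema({vol.Optional("options"): str, vol.Optional("state"): str})

optional({"state": "{{ states('input_select.mode') }}"})  # accepted: "options" may be omitted
try:
    required({"state": "{{ states('input_select.mode') }}"})
except vol.Invalid as err:
    print(err)  # required key not provided @ data['options']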
@@ -8,6 +8,7 @@ import logging
import aiohttp
from aiohttp.client_exceptions import ClientError, ClientResponseError
import tibber
from tibber import data_api as tibber_data_api

from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
@@ -22,7 +23,13 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util

from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN, TibberConfigEntry
from .const import (
    AUTH_IMPLEMENTATION,
    CONF_LEGACY_ACCESS_TOKEN,
    DATA_HASS_CONFIG,
    DOMAIN,
    TibberConfigEntry,
)
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services

@@ -37,23 +44,24 @@ _LOGGER = logging.getLogger(__name__)
class TibberRuntimeData:
    """Runtime data for Tibber API entries."""

    tibber_connection: tibber.Tibber
    session: OAuth2Session
    data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
    _client: tibber.Tibber | None = None
    _client: tibber_data_api.TibberDataAPI | None = None

    async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
        """Return an authenticated Tibber client."""
    async def async_get_client(
        self, hass: HomeAssistant
    ) -> tibber_data_api.TibberDataAPI:
        """Return an authenticated Tibber Data API client."""
        await self.session.async_ensure_token_valid()
        token = self.session.token
        access_token = token.get(CONF_ACCESS_TOKEN)
        if not access_token:
            raise ConfigEntryAuthFailed("Access token missing from OAuth session")
        if self._client is None:
            self._client = tibber.Tibber(
                access_token=access_token,
            self._client = tibber_data_api.TibberDataAPI(
                access_token,
                websession=async_get_clientsession(hass),
                time_zone=dt_util.get_default_time_zone(),
                ssl=ssl_util.get_default_context(),
            )
        self._client.set_access_token(access_token)
        return self._client
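Editor's note: async_get_client above follows a refresh-then-reuse pattern: ensure the OAuth token is valid, construct the API client once, and push the fresh token into it on every call. A minimal sketch under assumed stand-in types (RuntimeDataLike and ClientLike are not the Tibber classes):

# Sketch of the refresh-then-reuse client pattern (hypothetical stand-ins).
class ClientLike:
    def __init__(self, access_token: str) -> None:
        self._token = access_token

    def set_access_token(self, access_token: str) -> None:
        self._token = access_token

class RuntimeDataLike:
    def __init__(self, session) -> None:
        self._session = session
        self._client: ClientLike | None = None

    async def get_client(self) -> ClientLike:
        await self._session.async_ensure_token_valid()  # refresh if expired
        access_token = self._session.token["access_token"]
        if self._client is None:
            self._client = ClientLike(access_token)     # build once
        self._client.set_access_token(access_token)     # always push the fresh token
        return self._client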
@@ -80,6 +88,32 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bool:
        translation_key="data_api_reauth_required",
    )

    tibber_connection = tibber.Tibber(
        access_token=entry.data[CONF_LEGACY_ACCESS_TOKEN],
        websession=async_get_clientsession(hass),
        time_zone=dt_util.get_default_time_zone(),
        ssl=ssl_util.get_default_context(),
    )

    async def _close(event: Event) -> None:
        await tibber_connection.rt_disconnect()

    entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))

    try:
        await tibber_connection.update_info()
    except (
        TimeoutError,
        aiohttp.ClientError,
        tibber.RetryableHttpExceptionError,
    ) as err:
        raise ConfigEntryNotReady("Unable to connect") from err
    except tibber.InvalidLoginError as exp:
        _LOGGER.error("Failed to login. %s", exp)
        return False
    except tibber.FatalHttpExceptionError:
        return False

    try:
        implementation = await async_get_config_entry_implementation(hass, entry)
    except ImplementationUnavailableError as err:
@@ -101,29 +135,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bool:
        raise ConfigEntryNotReady from err

    entry.runtime_data = TibberRuntimeData(
        tibber_connection=tibber_connection,
        session=session,
    )

    tibber_connection = await entry.runtime_data.async_get_client(hass)

    async def _close(event: Event) -> None:
        await tibber_connection.rt_disconnect()

    entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))

    try:
        await tibber_connection.update_info()
    except (
        TimeoutError,
        aiohttp.ClientError,
        tibber.RetryableHttpExceptionError,
    ) as err:
        raise ConfigEntryNotReady("Unable to connect") from err
    except tibber.InvalidLoginError as err:
        raise ConfigEntryAuthFailed("Invalid login credentials") from err
    except tibber.FatalHttpExceptionError as err:
        raise ConfigEntryNotReady("Fatal HTTP error from Tibber API") from err

    coordinator = TibberDataAPICoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data.data_api_coordinator = coordinator
@@ -139,6 +154,5 @@ async def async_unload_entry(
    if unload_ok := await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    ):
        tibber_connection = await config_entry.runtime_data.async_get_client(hass)
        await tibber_connection.rt_disconnect()
        await config_entry.runtime_data.tibber_connection.rt_disconnect()
    return unload_ok
@@ -8,16 +8,21 @@ from typing import Any

import aiohttp
import tibber
from tibber import data_api as tibber_data_api
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler

from .const import DATA_API_DEFAULT_SCOPES, DOMAIN
from .const import CONF_LEGACY_ACCESS_TOKEN, DATA_API_DEFAULT_SCOPES, DOMAIN

DATA_SCHEMA = vol.Schema({vol.Required(CONF_LEGACY_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"

_LOGGER = logging.getLogger(__name__)

@@ -31,7 +36,8 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
    def __init__(self) -> None:
        """Initialize the config flow."""
        super().__init__()
        self._oauth_data: dict[str, Any] | None = None
        self._access_token: str | None = None
        self._title = ""

    @property
    def logger(self) -> logging.Logger:
@@ -46,70 +52,114 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
            "scope": " ".join(DATA_API_DEFAULT_SCOPES),
        }

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle a reauth flow."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication by reusing the user step."""
        """Handle the initial step."""
        if user_input is None:
            return self.async_show_form(step_id="reauth_confirm")
        return await self.async_step_user()
            data_schema = self.add_suggested_values_to_schema(
                DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
            )

    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
        """Finalize the OAuth flow and create the config entry."""
        self._oauth_data = data
        return await self._async_validate_and_create()
            return self.async_show_form(
                step_id=SOURCE_USER,
                data_schema=data_schema,
                description_placeholders={"url": TOKEN_URL},
                errors={},
            )

    async def async_step_connection_error(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle connection error retry."""
        if user_input is not None:
            return await self._async_validate_and_create()
        return self.async_show_form(step_id="connection_error")

    async def _async_validate_and_create(self) -> ConfigFlowResult:
        """Validate the OAuth token and create the config entry."""
        assert self._oauth_data is not None
        access_token = self._oauth_data[CONF_TOKEN][CONF_ACCESS_TOKEN]
        self._access_token = user_input[CONF_LEGACY_ACCESS_TOKEN].replace(" ", "")
        tibber_connection = tibber.Tibber(
            access_token=access_token,
            access_token=self._access_token,
            websession=async_get_clientsession(self.hass),
        )
        self._title = tibber_connection.name or "Tibber"

        errors: dict[str, str] = {}
        try:
            await tibber_connection.update_info()
        except TimeoutError:
            return await self.async_step_connection_error()
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TIMEOUT
        except tibber.InvalidLoginError:
            return self.async_abort(reason=ERR_TOKEN)
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TOKEN
        except (
            aiohttp.ClientError,
            tibber.RetryableHttpExceptionError,
            tibber.FatalHttpExceptionError,
        ):
            return await self.async_step_connection_error()
        except tibber.FatalHttpExceptionError:
            return self.async_abort(reason=ERR_CLIENT)
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_CLIENT

        if errors:
            data_schema = self.add_suggested_values_to_schema(
                DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
            )

            return self.async_show_form(
                step_id=SOURCE_USER,
                data_schema=data_schema,
                description_placeholders={"url": TOKEN_URL},
                errors=errors,
            )

        await self.async_set_unique_id(tibber_connection.user_id)

        title = tibber_connection.name or "Tibber"
        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
            self._abort_if_unique_id_mismatch(
                reason="wrong_account",
                description_placeholders={"title": reauth_entry.title},
            )
        else:
            self._abort_if_unique_id_configured()

        return await self.async_step_pick_implementation()

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle a reauth flow."""
        reauth_entry = self._get_reauth_entry()
        self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
        self._title = reauth_entry.title
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication by reusing the user step."""
        reauth_entry = self._get_reauth_entry()
        self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
        self._title = reauth_entry.title
        if user_input is None:
            return self.async_show_form(
                step_id="reauth_confirm",
            )
        return await self.async_step_user()

    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
        """Finalize the OAuth flow and create the config entry."""
        if self._access_token is None:
            return self.async_abort(reason="missing_configuration")

        data[CONF_LEGACY_ACCESS_TOKEN] = self._access_token

        access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
        data_api_client = tibber_data_api.TibberDataAPI(
            access_token,
            websession=async_get_clientsession(self.hass),
        )

        try:
            await data_api_client.get_userinfo()
        except (aiohttp.ClientError, TimeoutError):
            return self.async_abort(reason="cannot_connect")

        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
            return self.async_update_reload_and_abort(
                reauth_entry,
                data=self._oauth_data,
                title=title,
                data=data,
                title=self._title,
            )

        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=title, data=self._oauth_data)
        return self.async_create_entry(title=self._title, data=data)
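Editor's note: async_step_user above collects exceptions into an errors dict and re-shows the form instead of aborting, which is what makes the retry UX work. A self-contained sketch of that pattern (ValueError stands in for tibber.InvalidLoginError):

# Sketch of the errors-dict pattern used by the user step above.
ERR_TIMEOUT = "timeout"
ERR_TOKEN = "invalid_access_token"

async def validate(token: str, check) -> dict[str, str]:
    """Return a (possibly empty) errors dict instead of raising."""
    errors: dict[str, str] = {}
    try:
        await check(token)
    except TimeoutError:
        errors["access_token"] = ERR_TIMEOUT
    except ValueError:  # stand-in for tibber.InvalidLoginError
        errors["access_token"] = ERR_TOKEN
    return errors  # empty dict means the form validated; otherwise re-show it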
@@ -5,6 +5,7 @@ from __future__ import annotations

from typing import TYPE_CHECKING

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN

if TYPE_CHECKING:
    from . import TibberRuntimeData
@@ -12,6 +13,8 @@ if TYPE_CHECKING:
type TibberConfigEntry = ConfigEntry[TibberRuntimeData]


CONF_LEGACY_ACCESS_TOKEN = CONF_ACCESS_TOKEN

AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, cast

from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDevice
from tibber.data_api import TibberDataAPI, TibberDevice

from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -230,26 +230,28 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
            return device_sensors.get(sensor_id)
        return None

    async def _async_get_client(self) -> tibber.Tibber:
        """Get the Tibber client with error handling."""
    async def _async_get_client(self) -> TibberDataAPI:
        """Get the Tibber Data API client with error handling."""
        try:
            return await self._runtime_data.async_get_client(self.hass)
        except ConfigEntryAuthFailed:
            raise
        except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
            raise UpdateFailed(f"Unable to create Tibber client: {err}") from err
            raise UpdateFailed(
                f"Unable to create Tibber Data API client: {err}"
            ) from err

    async def _async_setup(self) -> None:
        """Initial load of Tibber Data API devices."""
        client = await self._async_get_client()
        devices = await client.data_api.get_all_devices()
        devices = await client.get_all_devices()
        self._build_sensor_lookup(devices)

    async def _async_update_data(self) -> dict[str, TibberDevice]:
        """Fetch the latest device capabilities from the Tibber Data API."""
        client = await self._async_get_client()
        try:
            devices: dict[str, TibberDevice] = await client.data_api.update_devices()
            devices: dict[str, TibberDevice] = await client.update_devices()
        except tibber.exceptions.RateLimitExceededError as err:
            raise UpdateFailed(
                f"Rate limit exceeded, retry after {err.retry_after} seconds",
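Editor's note: raising UpdateFailed on a rate limit lets the DataUpdateCoordinator log the failure, mark entities unavailable, and retry on the next cycle rather than tearing down the entry. A minimal sketch with stand-in exception classes:

# Sketch of mapping a rate-limit error to a coordinator failure (stand-in classes).
class UpdateFailed(Exception):
    """Stand-in for homeassistant.helpers.update_coordinator.UpdateFailed."""

class RateLimitExceededError(Exception):
    def __init__(self, retry_after: int) -> None:
        super().__init__(f"rate limited for {retry_after}s")
        self.retry_after = retry_after

async def fetch_devices(client):
    try:
        return await client.update_devices()
    except RateLimitExceededError as err:
        raise UpdateFailed(
            f"Rate limit exceeded, retry after {err.retry_after} seconds"
        ) from err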
@@ -15,7 +15,6 @@ async def async_get_config_entry_diagnostics(
    """Return diagnostics for a config entry."""

    runtime = config_entry.runtime_data
    tibber_connection = await runtime.async_get_client(hass)
    result: dict[str, Any] = {
        "homes": [
            {
@@ -25,7 +24,7 @@ async def async_get_config_entry_diagnostics(
                "last_cons_data_timestamp": home.last_cons_data_timestamp,
                "country": home.country,
            }
            for home in tibber_connection.get_homes(only_active=False)
            for home in runtime.tibber_connection.get_homes(only_active=False)
        ]
    }


@@ -2,8 +2,6 @@

from __future__ import annotations

import tibber

from homeassistant.components.notify import (
    ATTR_TITLE_DEFAULT,
    NotifyEntity,
@@ -39,9 +37,7 @@ class TibberNotificationEntity(NotifyEntity):

    async def async_send_message(self, message: str, title: str | None = None) -> None:
        """Send a message to Tibber devices."""
        tibber_connection: tibber.Tibber = (
            await self._entry.runtime_data.async_get_client(self.hass)
        )
        tibber_connection = self._entry.runtime_data.tibber_connection
        try:
            await tibber_connection.send_notification(
                title or ATTR_TITLE_DEFAULT, message

@@ -605,7 +605,7 @@ async def _async_setup_graphql_sensors(
) -> None:
    """Set up the Tibber sensor."""

    tibber_connection = await entry.runtime_data.async_get_client(hass)
    tibber_connection = entry.runtime_data.tibber_connection

    entity_registry = er.async_get(hass)


@@ -42,7 +42,7 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
            translation_domain=DOMAIN,
            translation_key="no_config_entry",
        )
    tibber_connection = await entries[0].runtime_data.async_get_client(call.hass)
    tibber_connection = entries[0].runtime_data.tibber_connection

    start = __get_date(call.data.get(ATTR_START), "start")
    end = __get_date(call.data.get(ATTR_END), "end")

@@ -2,21 +2,26 @@
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
      "timeout": "[%key:common::config_flow::error::timeout_connect%]"
    },
    "step": {
      "connection_error": {
        "description": "Could not connect to Tibber. Check your internet connection and try again.",
        "title": "Connection failed"
      },
      "reauth_confirm": {
        "description": "Reconnect your Tibber account to refresh access.",
        "title": "[%key:common::config_flow::title::reauth%]"
      },
      "user": {
        "data": {
          "access_token": "[%key:common::config_flow::data::access_token%]"
        },
        "description": "Enter your access token from {url}"
      }
    }
  },

@@ -165,7 +165,7 @@ class DeviceListener(SharingDeviceListener):
        self,
        device: CustomerDevice,
        updated_status_properties: list[str] | None = None,
        dp_timestamps: dict[str, int] | None = None,
        dp_timestamps: dict | None = None,
    ) -> None:
        """Update device status with optional DP timestamps."""
        LOGGER.debug(

@@ -471,11 +471,9 @@ class TuyaBinarySensorEntity(TuyaEntity, BinarySensorEntity):
    async def _handle_state_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        dp_timestamps: dict | None = None,
    ) -> None:
        """Handle state update, only if this entity's dpcode was actually updated."""
        if self._dpcode_wrapper.skip_update(
            self.device, updated_status_properties, dp_timestamps
        ):
        if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
            return
        self.async_write_ha_state()
@@ -57,7 +57,7 @@ class TuyaEntity(Entity):
    async def _handle_state_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        dp_timestamps: dict | None = None,
    ) -> None:
        self.async_write_ha_state()


@@ -218,10 +218,10 @@ class TuyaEventEntity(TuyaEntity, EventEntity):
    async def _handle_state_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        dp_timestamps: dict | None = None,
    ) -> None:
        if self._dpcode_wrapper.skip_update(
            self.device, updated_status_properties, dp_timestamps
            self.device, updated_status_properties
        ) or not (event_data := self._dpcode_wrapper.read_device_status(self.device)):
            return


@@ -31,10 +31,7 @@ class DeviceWrapper[T]:
    options: list[str]

    def skip_update(
        self,
        device: CustomerDevice,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        self, device: CustomerDevice, updated_status_properties: list[str] | None
    ) -> bool:
        """Determine if the wrapper should skip an update.

@@ -65,10 +62,7 @@ class DPCodeWrapper(DeviceWrapper):
        self.dpcode = dpcode

    def skip_update(
        self,
        device: CustomerDevice,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        self, device: CustomerDevice, updated_status_properties: list[str] | None
    ) -> bool:
        """Determine if the wrapper should skip an update.
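Editor's note: the dp_timestamps parameter removed above had threaded per-datapoint timestamps into skip_update. A hedged sketch of how such a guard could combine both signals (the timestamp comparison is an assumption on my part; only the signature appears in the diff):

# Assumed sketch of a skip_update guard using per-DP timestamps (illustrative).
def skip_update(
    dpcode: str,
    updated_status_properties: list[str] | None,
    dp_timestamps: dict[str, int] | None,
    last_seen: dict[str, int],
) -> bool:
    """Skip when this entity's dpcode was not in the update, or is stale."""
    if updated_status_properties is not None and dpcode not in updated_status_properties:
        return True  # this update did not touch our datapoint
    if dp_timestamps and dp_timestamps.get(dpcode, 0) <= last_seen.get(dpcode, 0):
        return True  # no newer timestamp for this datapoint
    if dp_timestamps:
        last_seen[dpcode] = dp_timestamps[dpcode]
    return False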
@@ -554,12 +554,10 @@ class TuyaNumberEntity(TuyaEntity, NumberEntity):
    async def _handle_state_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        dp_timestamps: dict | None = None,
    ) -> None:
        """Handle state update, only if this entity's dpcode was actually updated."""
        if self._dpcode_wrapper.skip_update(
            self.device, updated_status_properties, dp_timestamps
        ):
        if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
            return
        self.async_write_ha_state()


@@ -410,12 +410,10 @@ class TuyaSelectEntity(TuyaEntity, SelectEntity):
    async def _handle_state_update(
        self,
        updated_status_properties: list[str] | None,
        dp_timestamps: dict[str, int] | None,
        dp_timestamps: dict | None = None,
    ) -> None:
        """Handle state update, only if this entity's dpcode was actually updated."""
        if self._dpcode_wrapper.skip_update(
            self.device, updated_status_properties, dp_timestamps
        ):
        if self._dpcode_wrapper.skip_update(self.device, updated_status_properties):
            return
        self.async_write_ha_state()
Some files were not shown because too many files have changed in this diff.