Mirror of https://github.com/home-assistant/core.git
Synced 2026-01-23 08:07:00 +01:00

Compare commits: add_trigge... → adjust_cli
32 commits
| Author | SHA1 | Date |
|---|---|---|
| | ff7c98542d | |
| | 5bbc39bd88 | |
| | 6b14eb7ad1 | |
| | 83a53dea94 | |
| | 4fb89e68a7 | |
| | 5202ddf095 | |
| | f7d7a4502e | |
| | c7417d77b5 | |
| | 22018f1f80 | |
| | 22c6704d81 | |
| | 0552934b3c | |
| | bbe1d28e88 | |
| | b700a27c8f | |
| | 0566a668a9 | |
| | 94f636bc2d | |
| | a6e7546142 | |
| | 493319894b | |
| | 987396722b | |
| | 4f52b0363d | |
| | 52e18ed6f6 | |
| | 4180175fd3 | |
| | e39ee8cae7 | |
| | c214193087 | |
| | 2d84847be5 | |
| | 0d69fd4535 | |
| | 56f556864c | |
| | c1b03dc553 | |
| | 07e76578e6 | |
| | bc45fd4e45 | |
| | 0ea03f549c | |
| | 0ee46dbf5d | |
| | e12f394f8e | |
.github/workflows/ci.yaml (vendored) — 7 changes

```diff
@@ -1187,6 +1187,8 @@ jobs:
       - pytest-postgres
       - pytest-mariadb
     timeout-minutes: 10
+    permissions:
+      id-token: write
     # codecov/test-results-action currently doesn't support tokenless uploads
     # therefore we can't run it on forks
     if: |
@@ -1198,8 +1200,9 @@ jobs:
         with:
           pattern: test-results-*
       - name: Upload test results to Codecov
-        uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
+        uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
        with:
+          report_type: test_results
          fail_ci_if_error: true
          verbose: true
-          token: ${{ secrets.CODECOV_TOKEN }}
+          use_oidc: true
```
```diff
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.13.0
+    rev: v0.14.13
     hooks:
       - id: ruff-check
         args:
```
```diff
@@ -14,7 +14,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is armed"
+      "name": "Alarm is armed"
     },
     "is_armed_away": {
       "description": "Tests if one or more alarms are armed in away mode.",
@@ -24,7 +24,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is armed away"
+      "name": "Alarm is armed away"
     },
     "is_armed_home": {
       "description": "Tests if one or more alarms are armed in home mode.",
@@ -34,7 +34,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is armed home"
+      "name": "Alarm is armed home"
     },
     "is_armed_night": {
       "description": "Tests if one or more alarms are armed in night mode.",
@@ -44,7 +44,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is armed night"
+      "name": "Alarm is armed night"
     },
     "is_armed_vacation": {
       "description": "Tests if one or more alarms are armed in vacation mode.",
@@ -54,7 +54,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is armed vacation"
+      "name": "Alarm is armed vacation"
     },
     "is_disarmed": {
       "description": "Tests if one or more alarms are disarmed.",
@@ -64,7 +64,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is disarmed"
+      "name": "Alarm is disarmed"
     },
     "is_triggered": {
       "description": "Tests if one or more alarms are triggered.",
@@ -74,7 +74,7 @@
           "name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
         }
       },
-      "name": "If an alarm is triggered"
+      "name": "Alarm is triggered"
     }
   },
   "device_automation": {
```
```diff
@@ -5,9 +5,14 @@ from __future__ import annotations
 
 import asyncio
 import logging
 from random import randrange
-import sys
 from typing import Any, cast
 
+from pyatv import connect, exceptions, scan
+from pyatv.conf import AppleTV
+from pyatv.const import DeviceModel, Protocol
+from pyatv.convert import model_str
+from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
+
 from homeassistant.components import zeroconf
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -24,11 +29,7 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import Event, HomeAssistant, callback
-from homeassistant.exceptions import (
-    ConfigEntryAuthFailed,
-    ConfigEntryNotReady,
-    HomeAssistantError,
-)
+from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -42,18 +43,6 @@ from .const import (
     SIGNAL_DISCONNECTED,
 )
 
-if sys.version_info < (3, 14):
-    from pyatv import connect, exceptions, scan
-    from pyatv.conf import AppleTV
-    from pyatv.const import DeviceModel, Protocol
-    from pyatv.convert import model_str
-    from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
-else:
-
-    class DeviceListener:
-        """Dummy class."""
-
-
 _LOGGER = logging.getLogger(__name__)
 
 DEFAULT_NAME_TV = "Apple TV"
@@ -64,30 +53,25 @@ BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes
 
 PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
 
-if sys.version_info < (3, 14):
-    AUTH_EXCEPTIONS = (
-        exceptions.AuthenticationError,
-        exceptions.InvalidCredentialsError,
-        exceptions.NoCredentialsError,
-    )
-    CONNECTION_TIMEOUT_EXCEPTIONS = (
-        OSError,
-        asyncio.CancelledError,
-        TimeoutError,
-        exceptions.ConnectionLostError,
-        exceptions.ConnectionFailedError,
-    )
-    DEVICE_EXCEPTIONS = (
-        exceptions.ProtocolError,
-        exceptions.NoServiceError,
-        exceptions.PairingError,
-        exceptions.BackOffError,
-        exceptions.DeviceIdMissingError,
-    )
-else:
-    AUTH_EXCEPTIONS = ()
-    CONNECTION_TIMEOUT_EXCEPTIONS = ()
-    DEVICE_EXCEPTIONS = ()
+AUTH_EXCEPTIONS = (
+    exceptions.AuthenticationError,
+    exceptions.InvalidCredentialsError,
+    exceptions.NoCredentialsError,
+)
+CONNECTION_TIMEOUT_EXCEPTIONS = (
+    OSError,
+    asyncio.CancelledError,
+    TimeoutError,
+    exceptions.ConnectionLostError,
+    exceptions.ConnectionFailedError,
+)
+DEVICE_EXCEPTIONS = (
+    exceptions.ProtocolError,
+    exceptions.NoServiceError,
+    exceptions.PairingError,
+    exceptions.BackOffError,
+    exceptions.DeviceIdMissingError,
+)
 
 
 type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
@@ -95,10 +79,6 @@ type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
 
 async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
     """Set up a config entry for Apple TV."""
-    if sys.version_info >= (3, 14):
-        raise HomeAssistantError(
-            "Apple TV is not supported on Python 3.14. Please use Python 3.13."
-        )
     manager = AppleTVManager(hass, entry)
 
     if manager.is_on:
```
```diff
@@ -8,7 +8,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyatv", "srptools"],
-  "requirements": ["pyatv==0.16.1;python_version<'3.14'"],
+  "requirements": ["pyatv==0.17.0"],
   "zeroconf": [
     "_mediaremotetv._tcp.local.",
     "_companion-link._tcp.local.",
```
```diff
@@ -239,6 +239,15 @@ class AppleTvMediaPlayer(
         """
         self.async_write_ha_state()
 
+    @callback
+    def volume_device_update(
+        self, output_device: OutputDevice, old_level: float, new_level: float
+    ) -> None:
+        """Output device volume was updated.
+
+        This is a callback function from pyatv.interface.AudioListener.
+        """
+
     @callback
     def outputdevices_update(
         self, old_devices: list[OutputDevice], new_devices: list[OutputDevice]
```
```diff
@@ -2,14 +2,35 @@
 
 from __future__ import annotations
 
+import logging
+
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 
 from .coordinator import ArveConfigEntry, ArveCoordinator
 
+_LOGGER = logging.getLogger(__name__)
+
 PLATFORMS: list[Platform] = [Platform.SENSOR]
 
 
+async def async_migrate_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
+    """Migrate entry."""
+    _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
+
+    if entry.version == 1:
+        # 1 -> 1.2: Unique ID from integer to string
+        if entry.minor_version == 1:
+            minor_version = 2
+            hass.config_entries.async_update_entry(
+                entry, unique_id=str(entry.unique_id), minor_version=minor_version
+            )
+
+    _LOGGER.debug("Migration successful")
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
     """Set up Arve from a config entry."""
```
```diff
@@ -19,6 +19,9 @@ _LOGGER = logging.getLogger(__name__)
 class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Arve."""
 
+    VERSION = 1
+    MINOR_VERSION = 2
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -35,7 +38,7 @@ class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
         except ArveConnectionError:
             errors["base"] = "cannot_connect"
         else:
-            await self.async_set_unique_id(customer.customerId)
+            await self.async_set_unique_id(str(customer.customerId))
             self._abort_if_unique_id_configured()
             return self.async_create_entry(
                 title="Arve",
```
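For context, Home Assistant only invokes a component's `async_migrate_entry` when a stored entry's (version, minor_version) is behind what the flow handler declares, which is why the `MINOR_VERSION = 2` bump and the new `async_migrate_entry` above ship together. A minimal sketch of that gating, paraphrased rather than quoted from core; the identical pattern appears again below for the microBees and Monzo integrations:

```python
def needs_migration(entry: tuple[int, int], handler: tuple[int, int]) -> bool:
    """Return True when the stored entry predates the handler's declared version."""
    # Tuple comparison: major version first, then minor version.
    return entry < handler

# The Arve entry above moves from (1, 1) to (1, 2), so an existing entry with
# an integer unique_id gets migrated to a string unique_id at startup.
assert needs_migration((1, 1), (1, 2))
assert not needs_migration((1, 2), (1, 2))
```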
```diff
@@ -14,7 +14,7 @@
           "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
         }
       },
-      "name": "If a satellite is idle"
+      "name": "Satellite is idle"
     },
     "is_listening": {
       "description": "Tests if one or more Assist satellites are listening.",
@@ -24,7 +24,7 @@
           "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
         }
       },
-      "name": "If a satellite is listening"
+      "name": "Satellite is listening"
     },
     "is_processing": {
       "description": "Tests if one or more Assist satellites are processing.",
@@ -34,7 +34,7 @@
           "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
         }
       },
-      "name": "If a satellite is processing"
+      "name": "Satellite is processing"
     },
     "is_responding": {
       "description": "Tests if one or more Assist satellites are responding.",
@@ -44,7 +44,7 @@
           "name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
         }
       },
-      "name": "If a satellite is responding"
+      "name": "Satellite is responding"
     }
   },
   "entity_component": {
```
```diff
@@ -56,7 +56,7 @@ from homeassistant.core import (
     valid_entity_id,
 )
 from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
-from homeassistant.helpers import condition, config_validation as cv
+from homeassistant.helpers import condition as condition_helper, config_validation as cv
 from homeassistant.helpers.entity import ToggleEntity
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.issue_registry import (
@@ -554,7 +554,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         automation_id: str | None,
         name: str,
         trigger_config: list[ConfigType],
-        cond_func: IfAction | None,
+        condition: IfAction | None,
         action_script: Script,
         initial_state: bool | None,
         variables: ScriptVariables | None,
@@ -567,7 +567,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         self._attr_name = name
         self._trigger_config = trigger_config
         self._async_detach_triggers: CALLBACK_TYPE | None = None
-        self._cond_func = cond_func
+        self._condition = condition
         self.action_script = action_script
         self.action_script.change_listener = self.async_write_ha_state
         self._initial_state = initial_state
@@ -602,9 +602,11 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced labels."""
         referenced = self.action_script.referenced_labels
 
-        if self._cond_func is not None:
-            for conf in self._cond_func.config:
-                referenced |= condition.async_extract_labels(conf)
+        if self._condition is not None:
+            for conf in self._condition.config:
+                referenced |= condition_helper.async_extract_targets(
+                    conf, ATTR_LABEL_ID
+                )
 
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
@@ -615,9 +617,11 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced floors."""
         referenced = self.action_script.referenced_floors
 
-        if self._cond_func is not None:
-            for conf in self._cond_func.config:
-                referenced |= condition.async_extract_floors(conf)
+        if self._condition is not None:
+            for conf in self._condition.config:
+                referenced |= condition_helper.async_extract_targets(
+                    conf, ATTR_FLOOR_ID
+                )
 
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
@@ -628,9 +632,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced areas."""
         referenced = self.action_script.referenced_areas
 
-        if self._cond_func is not None:
-            for conf in self._cond_func.config:
-                referenced |= condition.async_extract_areas(conf)
+        if self._condition is not None:
+            for conf in self._condition.config:
+                referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)
 
         for conf in self._trigger_config:
             referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
@@ -648,9 +652,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced devices."""
         referenced = self.action_script.referenced_devices
 
-        if self._cond_func is not None:
-            for conf in self._cond_func.config:
-                referenced |= condition.async_extract_devices(conf)
+        if self._condition is not None:
+            for conf in self._condition.config:
+                referenced |= condition_helper.async_extract_devices(conf)
 
         for conf in self._trigger_config:
             referenced |= set(_trigger_extract_devices(conf))
@@ -662,9 +666,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return a set of referenced entities."""
        referenced = self.action_script.referenced_entities
 
-        if self._cond_func is not None:
-            for conf in self._cond_func.config:
-                referenced |= condition.async_extract_entities(conf)
+        if self._condition is not None:
+            for conf in self._condition.config:
+                referenced |= condition_helper.async_extract_entities(conf)
 
         for conf in self._trigger_config:
             for entity_id in _trigger_extract_entities(conf):
@@ -784,8 +788,8 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
 
         if (
             not skip_condition
-            and self._cond_func is not None
-            and not self._cond_func(variables)
+            and self._condition is not None
+            and not self._condition(variables)
         ):
             self._logger.debug(
                 "Conditions not met, aborting automation. Condition summary: %s",
@@ -1047,12 +1051,12 @@ async def _create_automation_entities(
         )
 
         if CONF_CONDITIONS in config_block:
-            cond_func = await _async_process_if(hass, name, config_block)
+            condition = await _async_process_if(hass, name, config_block)
 
-            if cond_func is None:
+            if condition is None:
                 continue
         else:
-            cond_func = None
+            condition = None
 
         # Add trigger variables to variables
         variables = None
@@ -1070,7 +1074,7 @@ async def _create_automation_entities(
             automation_id,
             name,
             config_block[CONF_TRIGGERS],
-            cond_func,
+            condition,
             action_script,
             initial_state,
             variables,
@@ -1212,7 +1216,7 @@ async def _async_process_if(
     if_configs = config[CONF_CONDITIONS]
 
     try:
-        if_action = await condition.async_conditions_from_config(
+        if_action = await condition_helper.async_conditions_from_config(
             hass, if_configs, LOGGER, name
         )
     except HomeAssistantError as ex:
```
```diff
@@ -1,6 +1,6 @@
 {
   "common": {
-    "trigger_behavior_description": "The behavior of the targeted climates to trigger on.",
+    "trigger_behavior_description": "The behavior of the targeted thermostats to trigger on.",
     "trigger_behavior_name": "Behavior"
   },
   "device_automation": {
@@ -298,22 +298,22 @@
       "name": "Set target temperature"
     },
     "toggle": {
-      "description": "Toggles climate device, from on to off, or off to on.",
+      "description": "Toggles thermostat, from on to off, or off to on.",
       "name": "[%key:common::action::toggle%]"
     },
     "turn_off": {
-      "description": "Turns climate device off.",
+      "description": "Turns thermostat off.",
       "name": "[%key:common::action::turn_off%]"
     },
     "turn_on": {
-      "description": "Turns climate device on.",
+      "description": "Turns thermostat on.",
       "name": "[%key:common::action::turn_on%]"
     }
   },
   "title": "Climate",
   "triggers": {
     "current_humidity_changed": {
-      "description": "Triggers after the humidity measured by one or more climate-control devices changes.",
+      "description": "Triggers after the humidity measured by one or more thermostats changes.",
       "fields": {
         "above": {
           "description": "Trigger when the humidity is above this value.",
@@ -324,10 +324,10 @@
           "name": "Below"
         }
       },
-      "name": "Climate-control device current humidity changed"
+      "name": "Thermostat current humidity changed"
     },
     "current_humidity_crossed_threshold": {
-      "description": "Triggers after the humidity measured by one or more climate-control devices crosses a threshold.",
+      "description": "Triggers after the humidity measured by one or more thermostats crosses a threshold.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -346,10 +346,10 @@
           "name": "Upper threshold"
         }
       },
-      "name": "Climate-control device current humidity crossed threshold"
+      "name": "Thermostat current humidity crossed threshold"
     },
     "current_temperature_changed": {
-      "description": "Triggers after the temperature measured by one or more climate-control devices changes.",
+      "description": "Triggers after the temperature measured by one or more thermostats changes.",
       "fields": {
         "above": {
           "description": "Trigger when the temperature is above this value.",
@@ -360,10 +360,10 @@
           "name": "Below"
         }
       },
-      "name": "Climate-control device current temperature changed"
+      "name": "Thermostat current temperature changed"
     },
     "current_temperature_crossed_threshold": {
-      "description": "Triggers after the temperature measured by one or more climate-control devices crosses a threshold.",
+      "description": "Triggers after the temperature measured by one or more thermostats crosses a threshold.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -382,10 +382,10 @@
           "name": "Upper threshold"
         }
       },
-      "name": "Climate-control device current temperature crossed threshold"
+      "name": "Thermostat current temperature crossed threshold"
     },
     "hvac_mode_changed": {
-      "description": "Triggers after the mode of one or more climate-control devices changes.",
+      "description": "Triggers after the mode of one or more thermostats changes.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -396,40 +396,40 @@
           "name": "Modes"
         }
       },
-      "name": "Climate-control device mode changed"
+      "name": "Thermostat mode changed"
     },
     "started_cooling": {
-      "description": "Triggers after one or more climate-control devices start cooling.",
+      "description": "Triggers after one or more thermostats start cooling.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
           "name": "[%key:component::climate::common::trigger_behavior_name%]"
         }
       },
-      "name": "Climate-control device started cooling"
+      "name": "Thermostat started cooling"
     },
     "started_drying": {
-      "description": "Triggers after one or more climate-control devices start drying.",
+      "description": "Triggers after one or more thermostats start drying.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
           "name": "[%key:component::climate::common::trigger_behavior_name%]"
         }
       },
-      "name": "Climate-control device started drying"
+      "name": "Thermostat started drying"
     },
     "started_heating": {
-      "description": "Triggers after one or more climate-control devices start heating.",
+      "description": "Triggers after one or more thermostats start heating.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
           "name": "[%key:component::climate::common::trigger_behavior_name%]"
         }
       },
-      "name": "Climate-control device started heating"
+      "name": "Thermostat started heating"
     },
     "target_humidity_changed": {
-      "description": "Triggers after the humidity setpoint of one or more climate-control devices changes.",
+      "description": "Triggers after the humidity setpoint of one or more thermostats changes.",
       "fields": {
         "above": {
           "description": "Trigger when the target humidity is above this value.",
@@ -440,10 +440,10 @@
           "name": "Below"
         }
       },
-      "name": "Climate-control device target humidity changed"
+      "name": "Thermostat target humidity changed"
     },
     "target_humidity_crossed_threshold": {
-      "description": "Triggers after the humidity setpoint of one or more climate-control devices crosses a threshold.",
+      "description": "Triggers after the humidity setpoint of one or more thermostats crosses a threshold.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -462,10 +462,10 @@
           "name": "Upper threshold"
         }
       },
-      "name": "Climate-control device target humidity crossed threshold"
+      "name": "Thermostat target humidity crossed threshold"
     },
     "target_temperature_changed": {
-      "description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
+      "description": "Triggers after the temperature setpoint of one or more thermostats changes.",
       "fields": {
         "above": {
           "description": "Trigger when the target temperature is above this value.",
@@ -476,10 +476,10 @@
           "name": "Below"
         }
       },
-      "name": "Climate-control device target temperature changed"
+      "name": "Thermostat target temperature changed"
     },
     "target_temperature_crossed_threshold": {
-      "description": "Triggers after the temperature setpoint of one or more climate-control devices crosses a threshold.",
+      "description": "Triggers after the temperature setpoint of one or more thermostats crosses a threshold.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -498,27 +498,27 @@
           "name": "Upper threshold"
         }
       },
-      "name": "Climate-control device target temperature crossed threshold"
+      "name": "Thermostat target temperature crossed threshold"
     },
     "turned_off": {
-      "description": "Triggers after one or more climate-control devices turn off.",
+      "description": "Triggers after one or more thermostats turn off.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
           "name": "[%key:component::climate::common::trigger_behavior_name%]"
         }
       },
-      "name": "Climate-control device turned off"
+      "name": "Thermostat turned off"
     },
     "turned_on": {
-      "description": "Triggers after one or more climate-control devices turn on, regardless of the mode.",
+      "description": "Triggers after one or more thermostats turn on, regardless of the mode.",
       "fields": {
         "behavior": {
           "description": "[%key:component::climate::common::trigger_behavior_description%]",
           "name": "[%key:component::climate::common::trigger_behavior_name%]"
         }
       },
-      "name": "Climate-control device turned on"
+      "name": "Thermostat turned on"
     }
   }
 }
```
```diff
@@ -14,7 +14,7 @@
           "name": "[%key:component::fan::common::condition_behavior_name%]"
         }
       },
-      "name": "If a fan is off"
+      "name": "Fan is off"
     },
     "is_on": {
       "description": "Tests if one or more fans are on.",
@@ -24,7 +24,7 @@
           "name": "[%key:component::fan::common::condition_behavior_name%]"
         }
       },
-      "name": "If a fan is on"
+      "name": "Fan is on"
     }
   },
   "device_automation": {
```
```diff
@@ -12,9 +12,6 @@
     },
     "non_methane_hydrocarbons": {
       "default": "mdi:molecule"
-    },
-    "ozone": {
-      "default": "mdi:molecule"
     }
   }
 }
```
```diff
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["google_air_quality_api"],
   "quality_scale": "bronze",
-  "requirements": ["google_air_quality_api==2.1.2"]
+  "requirements": ["google_air_quality_api==3.0.0"]
 }
```
```diff
@@ -13,7 +13,11 @@ from homeassistant.components.sensor import (
     SensorStateClass,
 )
 from homeassistant.config_entries import ConfigSubentry
-from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
+from homeassistant.const import (
+    CONCENTRATION_PARTS_PER_MILLION,
+    CONF_LATITUDE,
+    CONF_LONGITUDE,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -114,6 +118,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
         native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
         exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
         value_fn=lambda x: x.pollutants.co.concentration.value,
+        suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
     ),
     AirQualitySensorEntityDescription(
         key="nh3",
@@ -149,8 +154,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
     ),
     AirQualitySensorEntityDescription(
         key="o3",
-        translation_key="ozone",
         state_class=SensorStateClass.MEASUREMENT,
+        device_class=SensorDeviceClass.OZONE,
         native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
         exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
         value_fn=lambda x: x.pollutants.o3.concentration.value,
```
```diff
@@ -211,9 +211,6 @@
       "non_methane_hydrocarbons": {
         "name": "Non-methane hydrocarbons"
       },
-      "ozone": {
-        "name": "[%key:component::sensor::entity_component::ozone::name%]"
-      },
       "uaqi": {
         "name": "Universal Air Quality Index"
       },
```
```diff
@@ -83,6 +83,9 @@
       "invalid_credentials": "Input is incomplete. You must provide either your login details or an API token",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
+    "initiate_flow": {
+      "user": "[%key:common::config_flow::initiate_flow::account%]"
+    },
     "step": {
       "advanced": {
         "data": {
```
```diff
@@ -6,7 +6,7 @@ from typing import Any
 
 from aiohttp import web
 
-from homeassistant.components import frontend, panel_custom
+from homeassistant.components import frontend
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.const import ATTR_ICON
 from homeassistant.core import HomeAssistant
@@ -33,7 +33,7 @@ async def async_setup_addon_panel(hass: HomeAssistant, hassio: HassIO) -> None:
         # _register_panel never suspends and is only
         # a coroutine because it would be a breaking change
         # to make it a normal function
-        await _register_panel(hass, addon, data)
+        _register_panel(hass, addon, data)
 
 
 class HassIOAddonPanel(HomeAssistantView):
@@ -58,7 +58,7 @@ class HassIOAddonPanel(HomeAssistantView):
         data = panels[addon]
 
         # Register panel
-        await _register_panel(self.hass, addon, data)
+        _register_panel(self.hass, addon, data)
         return web.Response()
 
     async def delete(self, request: web.Request, addon: str) -> web.Response:
@@ -76,18 +76,14 @@ class HassIOAddonPanel(HomeAssistantView):
         return {}
 
 
-async def _register_panel(
-    hass: HomeAssistant, addon: str, data: dict[str, Any]
-) -> None:
+def _register_panel(hass: HomeAssistant, addon: str, data: dict[str, Any]):
     """Init coroutine to register the panel."""
-    await panel_custom.async_register_panel(
+    frontend.async_register_built_in_panel(
         hass,
+        "app",
         frontend_url_path=addon,
-        webcomponent_name="hassio-main",
         sidebar_title=data[ATTR_TITLE],
         sidebar_icon=data[ATTR_ICON],
-        js_url="/api/hassio/app/entrypoint.js",
-        embed_iframe=True,
         require_admin=data[ATTR_ADMIN],
-        config={"ingress": addon},
+        config={"addon": addon},
     )
```
```diff
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "quality_scale": "silver",
-  "requirements": ["hdfury==1.3.1"]
+  "requirements": ["hdfury==1.4.2"]
 }
```
```diff
@@ -28,6 +28,7 @@ from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.event import async_track_time_interval
 from homeassistant.helpers.httpx_client import get_async_client
+from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
 
 from .const import DOMAIN, UPDATE_INTERVAL
 from .entity import AqualinkEntity
@@ -66,7 +67,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
     username = entry.data[CONF_USERNAME]
     password = entry.data[CONF_PASSWORD]
 
-    aqualink = AqualinkClient(username, password, httpx_client=get_async_client(hass))
+    aqualink = AqualinkClient(
+        username,
+        password,
+        httpx_client=get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2),
+    )
     try:
         await aqualink.login()
     except AqualinkServiceException as login_exception:
```
```diff
@@ -15,6 +15,7 @@ import voluptuous as vol
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.helpers.httpx_client import get_async_client
+from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
 
 from .const import DOMAIN
 
@@ -36,7 +37,11 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
 
         try:
             async with AqualinkClient(
-                username, password, httpx_client=get_async_client(self.hass)
+                username,
+                password,
+                httpx_client=get_async_client(
+                    self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
+                ),
             ):
                 pass
         except AqualinkServiceUnauthorizedException:
```
```diff
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "quality_scale": "platinum",
-  "requirements": ["imgw_pib==1.6.0"]
+  "requirements": ["imgw_pib==2.0.1"]
 }
```
```diff
@@ -49,7 +49,7 @@
           "name": "[%key:component::light::common::condition_behavior_name%]"
         }
       },
-      "name": "If a light is off"
+      "name": "Light is off"
     },
     "is_on": {
       "description": "Tests if one or more lights are on.",
@@ -59,7 +59,7 @@
           "name": "[%key:component::light::common::condition_behavior_name%]"
         }
       },
-      "name": "If a light is on"
+      "name": "Light is on"
     }
   },
   "device_automation": {
```
```diff
@@ -106,9 +106,6 @@
     }
   },
   "triggers": {
-    "muted": {
-      "trigger": "mdi:volume-mute"
-    },
     "stopped_playing": {
       "trigger": "mdi:stop"
     }
```
```diff
@@ -380,16 +380,6 @@
   },
   "title": "Media player",
   "triggers": {
-    "muted": {
-      "description": "Triggers after one or more media players are muted.",
-      "fields": {
-        "behavior": {
-          "description": "[%key:component::media_player::common::trigger_behavior_description%]",
-          "name": "[%key:component::media_player::common::trigger_behavior_name%]"
-        }
-      },
-      "name": "Media player muted"
-    },
     "stopped_playing": {
       "description": "Triggers after one or more media players stop playing media.",
       "fields": {
```
```diff
@@ -1,35 +1,12 @@
 """Provides triggers for media players."""
 
-from homeassistant.core import HomeAssistant, State
-from homeassistant.helpers.trigger import (
-    EntityTriggerBase,
-    Trigger,
-    make_entity_transition_trigger,
-)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.trigger import Trigger, make_entity_transition_trigger
 
-from . import ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, MediaPlayerState
+from . import MediaPlayerState
 from .const import DOMAIN
 
 
-class MediaPlayerMutedTrigger(EntityTriggerBase):
-    """Class for media player muted triggers."""
-
-    _domain: str = DOMAIN
-
-    def is_muted(self, state: State) -> bool:
-        """Check if the media player is muted."""
-        return (
-            state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) is True
-            or state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) == 0
-        )
-
-    def is_to_state(self, state: State) -> bool:
-        """Check if the state matches the target state."""
-        return self.is_muted(state)
-
-
 TRIGGERS: dict[str, type[Trigger]] = {
-    "muted": MediaPlayerMutedTrigger,
     "stopped_playing": make_entity_transition_trigger(
         DOMAIN,
         from_states={
```
```diff
@@ -1,4 +1,4 @@
-.trigger_common: &trigger_common
+stopped_playing:
   target:
     entity:
       domain: media_player
@@ -13,6 +13,3 @@
           - first
           - last
           - any
-
-muted: *trigger_common
-stopped_playing: *trigger_common
```
```diff
@@ -2,6 +2,7 @@
 
 from dataclasses import dataclass
 from http import HTTPStatus
+import logging
 
 import aiohttp
 from microBeesPy import MicroBees
@@ -15,6 +16,8 @@ from homeassistant.helpers import config_entry_oauth2_flow
 from .const import DOMAIN, PLATFORMS
 from .coordinator import MicroBeesUpdateCoordinator
 
+_LOGGER = logging.getLogger(__name__)
+
 
 @dataclass(frozen=True, kw_only=True)
 class HomeAssistantMicroBeesData:
@@ -25,6 +28,23 @@ class HomeAssistantMicroBeesData:
     session: config_entry_oauth2_flow.OAuth2Session
 
 
+async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Migrate entry."""
+    _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
+
+    if entry.version == 1:
+        # 1 -> 1.2: Unique ID from integer to string
+        if entry.minor_version == 1:
+            minor_version = 2
+            hass.config_entries.async_update_entry(
+                entry, unique_id=str(entry.unique_id), minor_version=minor_version
+            )
+
+    _LOGGER.debug("Migration successful")
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up microBees from a config entry."""
     implementation = (
```
```diff
@@ -19,6 +19,8 @@ class OAuth2FlowHandler(
     """Handle a config flow for microBees."""
 
     DOMAIN = DOMAIN
+    VERSION = 1
+    MINOR_VERSION = 2
 
     @property
     def logger(self) -> logging.Logger:
@@ -47,7 +49,7 @@ class OAuth2FlowHandler(
             self.logger.exception("Unexpected error")
             return self.async_abort(reason="unknown")
 
-        await self.async_set_unique_id(current_user.id)
+        await self.async_set_unique_id(str(current_user.id))
         if self.source != SOURCE_REAUTH:
             self._abort_if_unique_id_configured()
         return self.async_create_entry(
```
```diff
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+import logging
+
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
@@ -15,9 +17,28 @@ from .api import AuthenticatedMonzoAPI
 from .const import DOMAIN
 from .coordinator import MonzoCoordinator
 
+_LOGGER = logging.getLogger(__name__)
+
 PLATFORMS: list[Platform] = [Platform.SENSOR]
 
 
+async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Migrate entry."""
+    _LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
+
+    if entry.version == 1:
+        # 1 -> 1.2: Unique ID from integer to string
+        if entry.minor_version == 1:
+            minor_version = 2
+            hass.config_entries.async_update_entry(
+                entry, unique_id=str(entry.unique_id), minor_version=minor_version
+            )
+
+    _LOGGER.debug("Migration successful")
+
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Monzo from a config entry."""
     implementation = await async_get_config_entry_implementation(hass, entry)
```
```diff
@@ -21,6 +21,8 @@ class MonzoFlowHandler(
     """Handle a config flow."""
 
     DOMAIN = DOMAIN
+    VERSION = 1
+    MINOR_VERSION = 2
 
     oauth_data: dict[str, Any]
 
@@ -51,7 +53,7 @@ class MonzoFlowHandler(
         """Create an entry for the flow."""
         self.oauth_data = data
         user_id = data[CONF_TOKEN]["user_id"]
-        await self.async_set_unique_id(user_id)
+        await self.async_set_unique_id(str(user_id))
         if self.source != SOURCE_REAUTH:
             self._abort_if_unique_id_configured()
         else:
```
```diff
@@ -8,5 +8,5 @@
   "iot_class": "cloud_push",
   "loggers": ["aionfty"],
   "quality_scale": "platinum",
-  "requirements": ["aiontfy==0.6.1"]
+  "requirements": ["aiontfy==0.7.0"]
 }
```
```diff
@@ -43,6 +43,7 @@ ATTR_ICON = "icon"
 ATTR_MARKDOWN = "markdown"
 ATTR_PRIORITY = "priority"
 ATTR_TAGS = "tags"
+ATTR_SEQUENCE_ID = "sequence_id"
 
 SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
     {
@@ -60,6 +61,7 @@ SERVICE_PUBLISH_SCHEMA = cv.make_entity_service_schema(
         vol.Optional(ATTR_EMAIL): vol.Email(),
         vol.Optional(ATTR_CALL): cv.string,
         vol.Optional(ATTR_ICON): vol.All(vol.Url(), vol.Coerce(URL)),
+        vol.Optional(ATTR_SEQUENCE_ID): cv.string,
     }
 )
```
```diff
@@ -88,3 +88,8 @@ publish:
         type: url
         autocomplete: url
       example: https://example.org/logo.png
+    sequence_id:
+      required: false
+      selector:
+        text:
+      example: "Mc3otamDNcpJ"
```
```diff
@@ -1,6 +1,7 @@
 {
   "common": {
     "add_topic_description": "Set up a topic for notifications.",
+    "sequence_id": "Sequence ID",
     "topic": "Topic"
   },
   "config": {
@@ -171,6 +172,9 @@
           "icon": { "name": "Icon" },
           "message": { "name": "Message" },
           "priority": { "name": "Priority" },
+          "sequence_id": {
+            "name": "[%key:component::ntfy::common::sequence_id%]"
+          },
           "tags": { "name": "Tags" },
           "time": { "name": "Time" },
           "title": { "name": "Title" },
@@ -356,6 +360,10 @@
         "description": "All messages have a priority that defines how urgently your phone notifies you, depending on the configured vibration patterns, notification sounds, and visibility in the notification drawer or pop-over.",
         "name": "Message priority"
       },
+      "sequence_id": {
+        "description": "Enter a message or sequence ID to update an existing notification, or specify a sequence ID to reference later when updating, clearing (mark as read and dismiss), or deleting a notification.",
+        "name": "[%key:component::ntfy::common::sequence_id%]"
+      },
       "tags": {
         "description": "Add tags or emojis to the notification. Emojis (using shortcodes like smile) will appear in the notification title or message. Other tags will be displayed below the notification content.",
         "name": "Tags/Emojis"
```
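Taken together, the ntfy changes above thread a new optional `sequence_id` through the publish schema, the service description, and the translations. A minimal sketch of exercising it from Python inside Home Assistant, assuming a hypothetical `notify.ntfy_mytopic` entity and reusing the example ID from services.yaml:

```python
# Publish a notification tagged with a sequence ID, then publish again with
# the same ID; per the new field's description, the second call updates the
# existing notification instead of creating a new one.
await hass.services.async_call(
    "ntfy",
    "publish",
    {
        "entity_id": "notify.ntfy_mytopic",  # hypothetical entity
        "message": "Backup started",
        "sequence_id": "Mc3otamDNcpJ",  # example value from services.yaml
    },
    blocking=True,
)
await hass.services.async_call(
    "ntfy",
    "publish",
    {
        "entity_id": "notify.ntfy_mytopic",
        "message": "Backup finished",
        "sequence_id": "Mc3otamDNcpJ",  # same ID -> update, not a new message
    },
    blocking=True,
)
```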
```diff
@@ -4,6 +4,7 @@
   "codeowners": ["@konikvranik", "@allenporter"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/rainbird",
+  "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["pyrainbird"],
   "requirements": ["pyrainbird==6.0.1"]
```
```diff
@@ -7,6 +7,9 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "unknown_license_plate": "Unknown license plate"
     },
+    "initiate_flow": {
+      "user": "Add vehicle"
+    },
     "step": {
       "user": {
         "data": {
```
```diff
@@ -228,9 +228,9 @@ _PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
 _SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
     CarbonMonoxideConcentrationConverter,
     NitrogenDioxideConcentrationConverter,
-    TemperatureDeltaConverter,
     OzoneConcentrationConverter,
     SulphurDioxideConcentrationConverter,
+    TemperatureDeltaConverter,
 ]
 
 STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
```
```diff
@@ -87,7 +87,6 @@ UNIT_SCHEMA = vol.Schema(
         vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS),
         vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS),
         vol.Optional("electric_current"): vol.In(ElectricCurrentConverter.VALID_UNITS),
-        vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
         vol.Optional("energy"): vol.In(EnergyConverter.VALID_UNITS),
         vol.Optional("energy_distance"): vol.In(EnergyDistanceConverter.VALID_UNITS),
         vol.Optional("information"): vol.In(InformationConverter.VALID_UNITS),
@@ -109,6 +108,7 @@ UNIT_SCHEMA = vol.Schema(
             TemperatureDeltaConverter.VALID_UNITS
         ),
         vol.Optional("unitless"): vol.In(UnitlessRatioConverter.VALID_UNITS),
+        vol.Optional("voltage"): vol.In(ElectricPotentialConverter.VALID_UNITS),
         vol.Optional("volume"): vol.In(VolumeConverter.VALID_UNITS),
         vol.Optional("volume_flow_rate"): vol.In(VolumeFlowRateConverter.VALID_UNITS),
     }
```
```diff
@@ -12,6 +12,9 @@
       "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
+    "initiate_flow": {
+      "user": "[%key:common::config_flow::initiate_flow::account%]"
+    },
     "step": {
       "kamereon": {
         "data": {
```
```diff
@@ -4,6 +4,7 @@
   "codeowners": ["@tomaszsluszniak"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/sanix",
+  "integration_type": "device",
   "iot_class": "cloud_polling",
   "requirements": ["sanix==1.0.6"]
 }
```
```diff
@@ -13,6 +13,7 @@
     }
   ],
   "documentation": "https://www.home-assistant.io/integrations/screenlogic",
+  "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["screenlogicpy"],
   "requirements": ["screenlogicpy==0.10.2"]
```
```diff
@@ -12,6 +12,7 @@
   "homekit": {
     "models": ["Sensibo"]
   },
+  "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["pysensibo"],
   "quality_scale": "platinum",
```
```diff
@@ -49,8 +49,8 @@ DEFAULT_NAME = "Template Select"
 
 SELECT_COMMON_SCHEMA = vol.Schema(
     {
-        vol.Optional(ATTR_OPTIONS): cv.template,
-        vol.Optional(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
+        vol.Required(ATTR_OPTIONS): cv.template,
+        vol.Required(CONF_SELECT_OPTION): cv.SCRIPT_SCHEMA,
         vol.Optional(CONF_STATE): cv.template,
     }
 )
```
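The effect of flipping those two keys to `vol.Required` is that a template select missing `options` or `select_option` now fails validation instead of configuring a half-working entity. A reduced sketch with plain voluptuous, using simple validators as stand-ins for `cv.template` and `cv.SCRIPT_SCHEMA`:

```python
import voluptuous as vol

# Simplified stand-in for SELECT_COMMON_SCHEMA after the change above.
schema = vol.Schema(
    {
        vol.Required("options"): str,         # stand-in for cv.template
        vol.Required("select_option"): list,  # stand-in for cv.SCRIPT_SCHEMA
        vol.Optional("state"): str,
    }
)

schema({"options": "{{ ['a', 'b'] }}", "select_option": []})  # validates

try:
    schema({"state": "{{ 'a' }}"})  # both required keys missing
except vol.MultipleInvalid as err:
    print(err)  # e.g. "required key not provided @ data['options']"
```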
```diff
@@ -8,7 +8,6 @@ import logging
 import aiohttp
 from aiohttp.client_exceptions import ClientError, ClientResponseError
 import tibber
-from tibber import data_api as tibber_data_api
 
 from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
 from homeassistant.core import Event, HomeAssistant
@@ -23,13 +22,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util import dt as dt_util, ssl as ssl_util
 
-from .const import (
-    AUTH_IMPLEMENTATION,
-    CONF_LEGACY_ACCESS_TOKEN,
-    DATA_HASS_CONFIG,
-    DOMAIN,
-    TibberConfigEntry,
-)
+from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN, TibberConfigEntry
 from .coordinator import TibberDataAPICoordinator
 from .services import async_setup_services
 
@@ -44,24 +37,23 @@ _LOGGER = logging.getLogger(__name__)
 class TibberRuntimeData:
     """Runtime data for Tibber API entries."""
 
-    tibber_connection: tibber.Tibber
     session: OAuth2Session
     data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
-    _client: tibber_data_api.TibberDataAPI | None = None
+    _client: tibber.Tibber | None = None
 
-    async def async_get_client(
-        self, hass: HomeAssistant
-    ) -> tibber_data_api.TibberDataAPI:
-        """Return an authenticated Tibber Data API client."""
+    async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
+        """Return an authenticated Tibber client."""
         await self.session.async_ensure_token_valid()
         token = self.session.token
         access_token = token.get(CONF_ACCESS_TOKEN)
         if not access_token:
             raise ConfigEntryAuthFailed("Access token missing from OAuth session")
         if self._client is None:
-            self._client = tibber_data_api.TibberDataAPI(
-                access_token,
+            self._client = tibber.Tibber(
+                access_token=access_token,
                 websession=async_get_clientsession(hass),
+                time_zone=dt_util.get_default_time_zone(),
+                ssl=ssl_util.get_default_context(),
             )
+        self._client.set_access_token(access_token)
         return self._client
@@ -88,32 +80,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
             translation_key="data_api_reauth_required",
         )
 
-    tibber_connection = tibber.Tibber(
-        access_token=entry.data[CONF_LEGACY_ACCESS_TOKEN],
-        websession=async_get_clientsession(hass),
-        time_zone=dt_util.get_default_time_zone(),
-        ssl=ssl_util.get_default_context(),
-    )
-
-    async def _close(event: Event) -> None:
-        await tibber_connection.rt_disconnect()
-
-    entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
-
-    try:
-        await tibber_connection.update_info()
-    except (
-        TimeoutError,
-        aiohttp.ClientError,
-        tibber.RetryableHttpExceptionError,
-    ) as err:
-        raise ConfigEntryNotReady("Unable to connect") from err
-    except tibber.InvalidLoginError as exp:
-        _LOGGER.error("Failed to login. %s", exp)
-        return False
-    except tibber.FatalHttpExceptionError:
-        return False
-
     try:
         implementation = await async_get_config_entry_implementation(hass, entry)
     except ImplementationUnavailableError as err:
@@ -135,10 +101,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
         raise ConfigEntryNotReady from err
 
     entry.runtime_data = TibberRuntimeData(
-        tibber_connection=tibber_connection,
         session=session,
     )
 
+    tibber_connection = await entry.runtime_data.async_get_client(hass)
+
+    async def _close(event: Event) -> None:
+        await tibber_connection.rt_disconnect()
+
+    entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
+
+    try:
+        await tibber_connection.update_info()
+    except (
+        TimeoutError,
+        aiohttp.ClientError,
+        tibber.RetryableHttpExceptionError,
+    ) as err:
+        raise ConfigEntryNotReady("Unable to connect") from err
+    except tibber.InvalidLoginError as err:
+        raise ConfigEntryAuthFailed("Invalid login credentials") from err
+    except tibber.FatalHttpExceptionError as err:
+        raise ConfigEntryNotReady("Fatal HTTP error from Tibber API") from err
+
     coordinator = TibberDataAPICoordinator(hass, entry)
     await coordinator.async_config_entry_first_refresh()
     entry.runtime_data.data_api_coordinator = coordinator
@@ -154,5 +139,6 @@ async def async_unload_entry(
     if unload_ok := await hass.config_entries.async_unload_platforms(
         config_entry, PLATFORMS
     ):
-        await config_entry.runtime_data.tibber_connection.rt_disconnect()
+        tibber_connection = await config_entry.runtime_data.async_get_client(hass)
+        await tibber_connection.rt_disconnect()
     return unload_ok
```
@@ -8,21 +8,16 @@ from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import tibber
|
||||
from tibber import data_api as tibber_data_api
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
|
||||
|
||||
from .const import CONF_LEGACY_ACCESS_TOKEN, DATA_API_DEFAULT_SCOPES, DOMAIN
|
||||
from .const import DATA_API_DEFAULT_SCOPES, DOMAIN

DATA_SCHEMA = vol.Schema({vol.Required(CONF_LEGACY_ACCESS_TOKEN): str})
ERR_TIMEOUT = "timeout"
ERR_CLIENT = "cannot_connect"
ERR_TOKEN = "invalid_access_token"
TOKEN_URL = "https://developer.tibber.com/settings/access-token"

_LOGGER = logging.getLogger(__name__)

@@ -36,8 +31,7 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):

    def __init__(self) -> None:
        """Initialize the config flow."""
        super().__init__()
        self._access_token: str | None = None
        self._title = ""
        self._oauth_data: dict[str, Any] | None = None

    @property
    def logger(self) -> logging.Logger:
@@ -52,114 +46,70 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
            "scope": " ".join(DATA_API_DEFAULT_SCOPES),
        }

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        if user_input is None:
            data_schema = self.add_suggested_values_to_schema(
                DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
            )

            return self.async_show_form(
                step_id=SOURCE_USER,
                data_schema=data_schema,
                description_placeholders={"url": TOKEN_URL},
                errors={},
            )

        self._access_token = user_input[CONF_LEGACY_ACCESS_TOKEN].replace(" ", "")
        tibber_connection = tibber.Tibber(
            access_token=self._access_token,
            websession=async_get_clientsession(self.hass),
        )
        self._title = tibber_connection.name or "Tibber"

        errors: dict[str, str] = {}
        try:
            await tibber_connection.update_info()
        except TimeoutError:
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TIMEOUT
        except tibber.InvalidLoginError:
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_TOKEN
        except (
            aiohttp.ClientError,
            tibber.RetryableHttpExceptionError,
            tibber.FatalHttpExceptionError,
        ):
            errors[CONF_LEGACY_ACCESS_TOKEN] = ERR_CLIENT

        if errors:
            data_schema = self.add_suggested_values_to_schema(
                DATA_SCHEMA, {CONF_LEGACY_ACCESS_TOKEN: self._access_token or ""}
            )

            return self.async_show_form(
                step_id=SOURCE_USER,
                data_schema=data_schema,
                description_placeholders={"url": TOKEN_URL},
                errors=errors,
            )

        await self.async_set_unique_id(tibber_connection.user_id)

        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
            self._abort_if_unique_id_mismatch(
                reason="wrong_account",
                description_placeholders={"title": reauth_entry.title},
            )
        else:
            self._abort_if_unique_id_configured()

        return await self.async_step_pick_implementation()

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle a reauth flow."""
        reauth_entry = self._get_reauth_entry()
        self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
        self._title = reauth_entry.title
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication by reusing the user step."""
        reauth_entry = self._get_reauth_entry()
        self._access_token = reauth_entry.data.get(CONF_LEGACY_ACCESS_TOKEN)
        self._title = reauth_entry.title
        if user_input is None:
            return self.async_show_form(
                step_id="reauth_confirm",
            )
            return self.async_show_form(step_id="reauth_confirm")
        return await self.async_step_user()

    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
        """Finalize the OAuth flow and create the config entry."""
        if self._access_token is None:
            return self.async_abort(reason="missing_configuration")
        self._oauth_data = data
        return await self._async_validate_and_create()

        data[CONF_LEGACY_ACCESS_TOKEN] = self._access_token
    async def async_step_connection_error(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle connection error retry."""
        if user_input is not None:
            return await self._async_validate_and_create()
        return self.async_show_form(step_id="connection_error")

        access_token = data[CONF_TOKEN][CONF_ACCESS_TOKEN]
        data_api_client = tibber_data_api.TibberDataAPI(
            access_token,
    async def _async_validate_and_create(self) -> ConfigFlowResult:
        """Validate the OAuth token and create the config entry."""
        assert self._oauth_data is not None
        access_token = self._oauth_data[CONF_TOKEN][CONF_ACCESS_TOKEN]
        tibber_connection = tibber.Tibber(
            access_token=access_token,
            websession=async_get_clientsession(self.hass),
        )

        try:
            await data_api_client.get_userinfo()
        except (aiohttp.ClientError, TimeoutError):
            return self.async_abort(reason="cannot_connect")
            await tibber_connection.update_info()
        except TimeoutError:
            return await self.async_step_connection_error()
        except tibber.InvalidLoginError:
            return self.async_abort(reason=ERR_TOKEN)
        except (
            aiohttp.ClientError,
            tibber.RetryableHttpExceptionError,
        ):
            return await self.async_step_connection_error()
        except tibber.FatalHttpExceptionError:
            return self.async_abort(reason=ERR_CLIENT)

        await self.async_set_unique_id(tibber_connection.user_id)

        title = tibber_connection.name or "Tibber"
        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
            self._abort_if_unique_id_mismatch(
                reason="wrong_account",
                description_placeholders={"title": reauth_entry.title},
            )
            return self.async_update_reload_and_abort(
                reauth_entry,
                data=data,
                title=self._title,
                data=self._oauth_data,
                title=title,
            )

        return self.async_create_entry(title=self._title, data=data)
        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=title, data=self._oauth_data)

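The rework above moves token validation out of async_oauth_create_entry into a dedicated _async_validate_and_create step, so a transient network failure shows a retry form instead of aborting the flow. A minimal sketch of that validate-then-retry pattern, with a hypothetical domain and a stubbed validation call standing in for the real Tibber client:

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


class ExampleFlow(ConfigFlow, domain="example"):
    """Sketch of the validate-then-retry pattern (hypothetical domain)."""

    async def _async_validate_and_create(self) -> ConfigFlowResult:
        try:
            await self._check_connection()
        except TimeoutError:
            # Transient failure: offer a retry form instead of aborting.
            return await self.async_step_connection_error()
        return self.async_create_entry(title="Example", data={})

    async def async_step_connection_error(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show a form whose only purpose is to re-run validation."""
        if user_input is not None:
            return await self._async_validate_and_create()
        return self.async_show_form(step_id="connection_error")

    async def _check_connection(self) -> None:
        """Stand-in for the real network check (assumption)."""
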
@@ -5,7 +5,6 @@ from __future__ import annotations
from typing import TYPE_CHECKING

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN

if TYPE_CHECKING:
    from . import TibberRuntimeData
@@ -13,8 +12,6 @@ if TYPE_CHECKING:
type TibberConfigEntry = ConfigEntry[TibberRuntimeData]


CONF_LEGACY_ACCESS_TOKEN = CONF_ACCESS_TOKEN

AUTH_IMPLEMENTATION = "auth_implementation"
DATA_HASS_CONFIG = "tibber_hass_config"
DOMAIN = "tibber"

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, cast

from aiohttp.client_exceptions import ClientError
import tibber
from tibber.data_api import TibberDataAPI, TibberDevice
from tibber.data_api import TibberDevice

from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
@@ -230,28 +230,26 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
            return device_sensors.get(sensor_id)
        return None

    async def _async_get_client(self) -> TibberDataAPI:
        """Get the Tibber Data API client with error handling."""
    async def _async_get_client(self) -> tibber.Tibber:
        """Get the Tibber client with error handling."""
        try:
            return await self._runtime_data.async_get_client(self.hass)
        except ConfigEntryAuthFailed:
            raise
        except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
            raise UpdateFailed(
                f"Unable to create Tibber Data API client: {err}"
            ) from err
            raise UpdateFailed(f"Unable to create Tibber client: {err}") from err

    async def _async_setup(self) -> None:
        """Initial load of Tibber Data API devices."""
        client = await self._async_get_client()
        devices = await client.get_all_devices()
        devices = await client.data_api.get_all_devices()
        self._build_sensor_lookup(devices)

    async def _async_update_data(self) -> dict[str, TibberDevice]:
        """Fetch the latest device capabilities from the Tibber Data API."""
        client = await self._async_get_client()
        try:
            devices: dict[str, TibberDevice] = await client.update_devices()
            devices: dict[str, TibberDevice] = await client.data_api.update_devices()
        except tibber.exceptions.RateLimitExceededError as err:
            raise UpdateFailed(
                f"Rate limit exceeded, retry after {err.retry_after} seconds",

@@ -15,6 +15,7 @@ async def async_get_config_entry_diagnostics(
    """Return diagnostics for a config entry."""

    runtime = config_entry.runtime_data
    tibber_connection = await runtime.async_get_client(hass)
    result: dict[str, Any] = {
        "homes": [
            {
@@ -24,7 +25,7 @@ async def async_get_config_entry_diagnostics(
                "last_cons_data_timestamp": home.last_cons_data_timestamp,
                "country": home.country,
            }
            for home in runtime.tibber_connection.get_homes(only_active=False)
            for home in tibber_connection.get_homes(only_active=False)
        ]
    }

@@ -2,6 +2,8 @@

from __future__ import annotations

import tibber

from homeassistant.components.notify import (
    ATTR_TITLE_DEFAULT,
    NotifyEntity,
@@ -37,7 +39,9 @@ class TibberNotificationEntity(NotifyEntity):

    async def async_send_message(self, message: str, title: str | None = None) -> None:
        """Send a message to Tibber devices."""
        tibber_connection = self._entry.runtime_data.tibber_connection
        tibber_connection: tibber.Tibber = (
            await self._entry.runtime_data.async_get_client(self.hass)
        )
        try:
            await tibber_connection.send_notification(
                title or ATTR_TITLE_DEFAULT, message

@@ -605,7 +605,7 @@ async def _async_setup_graphql_sensors(
) -> None:
    """Set up the Tibber sensor."""

    tibber_connection = entry.runtime_data.tibber_connection
    tibber_connection = await entry.runtime_data.async_get_client(hass)

    entity_registry = er.async_get(hass)

@@ -42,7 +42,7 @@ async def __get_prices(call: ServiceCall) -> ServiceResponse:
            translation_domain=DOMAIN,
            translation_key="no_config_entry",
        )
    tibber_connection = entries[0].runtime_data.tibber_connection
    tibber_connection = await entries[0].runtime_data.async_get_client(call.hass)

    start = __get_date(call.data.get(ATTR_START), "start")
    end = __get_date(call.data.get(ATTR_END), "end")

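Each of these call sites swaps the eagerly created runtime_data.tibber_connection attribute for an awaited runtime_data.async_get_client(hass) accessor. A minimal sketch of what such a lazy accessor could look like; the class name is hypothetical, while the tibber.Tibber constructor arguments match the config-flow code above:

from dataclasses import dataclass, field

import tibber

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession


@dataclass
class ExampleRuntimeData:
    """Sketch of a runtime-data holder with a lazy client accessor."""

    access_token: str
    _client: tibber.Tibber | None = field(default=None, init=False)

    async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
        # Create the client on first use; callers never touch the attribute
        # directly, so any token refresh only has to happen in this one place.
        if self._client is None:
            self._client = tibber.Tibber(
                access_token=self.access_token,
                websession=async_get_clientsession(hass),
            )
        return self._client
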
@@ -2,26 +2,21 @@
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
      "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
      "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "wrong_account": "The connected account does not match {title}. Sign in with the same Tibber account and try again."
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]",
      "timeout": "[%key:common::config_flow::error::timeout_connect%]"
    },
    "step": {
      "connection_error": {
        "description": "Could not connect to Tibber. Check your internet connection and try again.",
        "title": "Connection failed"
      },
      "reauth_confirm": {
        "description": "Reconnect your Tibber account to refresh access.",
        "title": "[%key:common::config_flow::title::reauth%]"
      },
      "user": {
        "data": {
          "access_token": "[%key:common::config_flow::data::access_token%]"
        },
        "description": "Enter your access token from {url}"
      }
    }
  },

@@ -83,6 +83,14 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        )
        return False

    if entry.version == 2:
        # 2 -> 2.2: Unique ID from integer to string
        if entry.minor_version == 1:
            minor_version = 2
            hass.config_entries.async_update_entry(
                entry, unique_id=str(entry.unique_id), minor_version=minor_version
            )

    return True

@@ -20,6 +20,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):

    DOMAIN = DOMAIN
    VERSION = 2
    MINOR_VERSION = 2

    agreements: list[Agreement]
    data: dict[str, Any]
@@ -92,7 +93,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
        if self.migrate_entry:
            await self.hass.config_entries.async_remove(self.migrate_entry)

        await self.async_set_unique_id(agreement.agreement_id)
        await self.async_set_unique_id(str(agreement.agreement_id))
        self._abort_if_unique_id_configured()

        self.data[CONF_AGREEMENT_ID] = agreement.agreement_id

@@ -8,6 +8,17 @@ import dataclasses
from uiprotect.data import (
    NVR,
    Camera,
    Event,
    ModelType,
    MountType,
    ProtectAdoptableDeviceModel,
@@ -644,6 +645,31 @@ class ProtectEventBinarySensor(EventEntityMixin, BinarySensorEntity):
        self._attr_is_on = False
        self._attr_extra_state_attributes = {}

    @callback
    def _find_active_event_with_object_type(
        self, device: ProtectDeviceType
    ) -> Event | None:
        """Find an active event containing this sensor's object type.

        Fallback for issue #152133: last_smart_detect_event_ids may not update
        immediately when a new detection type is added to an ongoing event.
        """
        obj_type = self.entity_description.ufp_obj_type
        if obj_type is None or not isinstance(device, Camera):
            return None

        # Check known active event IDs from camera first (fast path)
        for event_id in device.last_smart_detect_event_ids.values():
            if (
                event_id
                and (event := self.data.api.bootstrap.events.get(event_id))
                and event.end is None
                and obj_type in event.smart_detect_types
            ):
                return event

        return None

    @callback
    def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None:
        description = self.entity_description
@@ -651,9 +677,15 @@ class ProtectEventBinarySensor(EventEntityMixin, BinarySensorEntity):
        prev_event = self._event
        prev_event_end = self._event_end
        super()._async_update_device_from_protect(device)
        if event := description.get_event_obj(device):

        event = description.get_event_obj(device)
        if event is None:
            # Fallback for #152133: check active events directly
            event = self._find_active_event_with_object_type(device)

        if event:
            self._event = event
            self._event_end = event.end if event else None
            self._event_end = event.end

        if not (
            event

@@ -41,7 +41,7 @@
    "iot_class": "local_push",
    "loggers": ["uiprotect", "unifi_discovery"],
    "quality_scale": "platinum",
    "requirements": ["uiprotect==10.0.0", "unifi-discovery==1.2.0"],
    "requirements": ["uiprotect==10.0.1", "unifi-discovery==1.2.0"],
    "ssdp": [
      {
        "manufacturer": "Ubiquiti Networks",

@@ -5824,7 +5824,7 @@
    },
    "sanix": {
      "name": "Sanix",
      "integration_type": "hub",
      "integration_type": "device",
      "config_flow": true,
      "iot_class": "cloud_polling"
    },

@@ -370,9 +370,13 @@ def _async_get_connector(
        return connectors[connector_key]

    if verify_ssl:
        ssl_context: SSLContext = ssl_util.client_context(ssl_cipher)
        ssl_context: SSLContext = ssl_util.client_context(
            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
        )
    else:
        ssl_context = ssl_util.client_context_no_verify(ssl_cipher)
        ssl_context = ssl_util.client_context_no_verify(
            ssl_cipher, ssl_util.SSL_ALPN_HTTP11
        )

    connector = HomeAssistantTCPConnector(
        family=family,

@@ -29,10 +29,7 @@ from typing import (
import voluptuous as vol

from homeassistant.const import (
    ATTR_AREA_ID,
    ATTR_DEVICE_CLASS,
    ATTR_FLOOR_ID,
    ATTR_LABEL_ID,
    CONF_ABOVE,
    CONF_AFTER,
    CONF_ATTRIBUTE,
@@ -1387,27 +1384,9 @@ def async_extract_devices(config: ConfigType | Template) -> set[str]:


@callback
def async_extract_areas(config: ConfigType | Template) -> set[str]:
    """Extract areas from a condition."""
    return _async_extract_targets(config, ATTR_AREA_ID)


@callback
def async_extract_floors(config: ConfigType | Template) -> set[str]:
    """Extract floors from a condition."""
    return _async_extract_targets(config, ATTR_FLOOR_ID)


@callback
def async_extract_labels(config: ConfigType | Template) -> set[str]:
    """Extract labels from a condition."""
    return _async_extract_targets(config, ATTR_LABEL_ID)


@callback
def _async_extract_targets(
def async_extract_targets(
    config: ConfigType | Template,
    target_type: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
    target_type: Literal["area_id", "floor_id", "label_id"],
) -> set[str]:
    """Extract targets from a condition."""
    referenced: set[str] = set()

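The three thin wrappers collapse into a single public async_extract_targets(config, target_type) helper whose Literal parameter now only admits area, floor, and label IDs. A toy re-implementation (not the real helper) showing the recursive collection the signature implies:

from typing import Literal

type TargetType = Literal["area_id", "floor_id", "label_id"]


def extract_targets(config: dict, target_type: TargetType) -> set[str]:
    """Toy sketch: collect target IDs from a nested condition dict."""
    referenced: set[str] = set()
    value = config.get(target_type)
    if isinstance(value, str):
        referenced.add(value)
    elif isinstance(value, list):
        referenced.update(value)
    # Recurse into nested conditions (and/or/not style blocks).
    for child in config.get("conditions", []):
        referenced.update(extract_targets(child, target_type))
    return referenced


print(extract_targets(
    {"condition": "and", "conditions": [{"area_id": "kitchen"}, {"area_id": ["hall"]}]},
    "area_id",
))  # {'kitchen', 'hall'}
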
@@ -17,6 +17,9 @@ from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.ssl import (
    SSL_ALPN_HTTP11,
    SSL_ALPN_HTTP11_HTTP2,
    SSLALPNProtocols,
    SSLCipherList,
    client_context,
    create_no_verify_ssl_context,
@@ -28,9 +31,9 @@ from .frame import warn_use
# and we want to keep the connection open for a while so we
# don't have to reconnect every time so we use 15s to match aiohttp.
KEEP_ALIVE_TIMEOUT = 15
DATA_ASYNC_CLIENT: HassKey[httpx.AsyncClient] = HassKey("httpx_async_client")
DATA_ASYNC_CLIENT_NOVERIFY: HassKey[httpx.AsyncClient] = HassKey(
    "httpx_async_client_noverify"
# Shared httpx clients keyed by (verify_ssl, alpn_protocols)
DATA_ASYNC_CLIENT: HassKey[dict[tuple[bool, SSLALPNProtocols], httpx.AsyncClient]] = (
    HassKey("httpx_async_client")
)
DEFAULT_LIMITS = limits = httpx.Limits(keepalive_expiry=KEEP_ALIVE_TIMEOUT)
SERVER_SOFTWARE = (
@@ -42,15 +45,26 @@ USER_AGENT = "User-Agent"

@callback
@bind_hass
def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
def get_async_client(
    hass: HomeAssistant,
    verify_ssl: bool = True,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
) -> httpx.AsyncClient:
    """Return default httpx AsyncClient.

    This method must be run in the event loop.
    """
    key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY

    if (client := hass.data.get(key)) is None:
        client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 to get a client configured for HTTP/2.
    Clients are cached separately by ALPN protocol to ensure proper SSL context
    configuration (ALPN protocols differ between HTTP versions).
    """
    client_key = (verify_ssl, alpn_protocols)
    clients = hass.data.setdefault(DATA_ASYNC_CLIENT, {})

    if (client := clients.get(client_key)) is None:
        client = clients[client_key] = create_async_httpx_client(
            hass, verify_ssl, alpn_protocols=alpn_protocols
        )

    return client

@@ -77,6 +91,7 @@ def create_async_httpx_client(
    verify_ssl: bool = True,
    auto_cleanup: bool = True,
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_HTTP11,
    **kwargs: Any,
) -> httpx.AsyncClient:
    """Create a new httpx.AsyncClient with kwargs, i.e. for cookies.
@@ -84,13 +99,22 @@ def create_async_httpx_client(
    If auto_cleanup is False, the client will be
    automatically closed on homeassistant_stop.

    Pass alpn_protocols=SSL_ALPN_HTTP11_HTTP2 for HTTP/2 support (automatically
    enables httpx http2 mode).

    This method must be run in the event loop.
    """
    # Use the requested ALPN protocols directly to ensure proper SSL context
    # bucketing. httpx/httpcore mutates SSL contexts by calling set_alpn_protocols(),
    # so we pre-set the correct protocols to prevent shared context corruption.
    ssl_context = (
        client_context(ssl_cipher_list)
        client_context(ssl_cipher_list, alpn_protocols)
        if verify_ssl
        else create_no_verify_ssl_context(ssl_cipher_list)
        else create_no_verify_ssl_context(ssl_cipher_list, alpn_protocols)
    )
    # Enable httpx HTTP/2 mode when HTTP/2 protocol is requested
    if alpn_protocols == SSL_ALPN_HTTP11_HTTP2:
        kwargs.setdefault("http2", True)
    client = HassHttpXAsyncClient(
        verify=ssl_context,
        headers={USER_AGENT: SERVER_SOFTWARE},

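With the keyed cache in place, callers opt into HTTP/2 per call site. A minimal usage sketch built from the helper names introduced above; note that httpx needs the optional h2 package for http2=True, and HTTP/2 is only actually used when the server negotiates it via ALPN:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2


async def fetch_over_http2(hass: HomeAssistant, url: str) -> int:
    # Request the HTTP/2-capable client; it is cached separately from the
    # default HTTP/1.1 client because the SSL contexts differ by ALPN.
    client = get_async_client(hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2)
    response = await client.get(url)
    return response.status_code
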
@@ -1601,8 +1601,13 @@ class Script:
            ):
                _referenced_extract_ids(data, target, referenced)

            elif action == cv.SCRIPT_ACTION_CHECK_CONDITION:
                referenced |= condition.async_extract_targets(step, target)

            elif action == cv.SCRIPT_ACTION_CHOOSE:
                for choice in step[CONF_CHOOSE]:
                    for cond in choice[CONF_CONDITIONS]:
                        referenced |= condition.async_extract_targets(cond, target)
                    Script._find_referenced_target(
                        target, referenced, choice[CONF_SEQUENCE]
                    )
@@ -1612,6 +1617,8 @@ class Script:
                )

            elif action == cv.SCRIPT_ACTION_IF:
                for cond in step[CONF_IF]:
                    referenced |= condition.async_extract_targets(cond, target)
                Script._find_referenced_target(target, referenced, step[CONF_THEN])
                if CONF_ELSE in step:
                    Script._find_referenced_target(target, referenced, step[CONF_ELSE])

@@ -8,6 +8,17 @@ import ssl

import certifi

# Type alias for ALPN protocols tuple (None means no ALPN protocols set)
type SSLALPNProtocols = tuple[str, ...] | None

# ALPN protocol configurations
# No ALPN protocols - used for libraries that don't support/need ALPN (e.g., aioimap)
SSL_ALPN_NONE: SSLALPNProtocols = None
# HTTP/1.1 only - used by default and for aiohttp (which doesn't support HTTP/2)
SSL_ALPN_HTTP11: SSLALPNProtocols = ("http/1.1",)
# HTTP/1.1 with HTTP/2 support - used when httpx http2=True
SSL_ALPN_HTTP11_HTTP2: SSLALPNProtocols = ("http/1.1", "h2")


class SSLCipherList(StrEnum):
    """SSL cipher lists."""
@@ -64,7 +75,10 @@ SSL_CIPHER_LISTS = {


@cache
def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
def _client_context_no_verify(
    ssl_cipher_list: SSLCipherList,
    alpn_protocols: SSLALPNProtocols,
) -> ssl.SSLContext:
    # This is a copy of aiohttp's create_default_context() function, with the
    # ssl verify turned off.
    # https://github.com/aio-libs/aiohttp/blob/33953f110e97eecc707e1402daa8d543f38a189b/aiohttp/connector.py#L911
@@ -78,12 +92,18 @@ def _client_context_no_verify(ssl_cipher_list: SSLCipherList) -> ssl.SSLContext:
    sslcontext.set_default_verify_paths()
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext


def _create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # Reuse environment variable definition from requests, since it's already a
@@ -96,6 +116,11 @@ def _create_client_context(
    )
    if ssl_cipher_list != SSLCipherList.PYTHON_DEFAULT:
        sslcontext.set_ciphers(SSL_CIPHER_LISTS[ssl_cipher_list])
    # Set ALPN protocols to prevent downstream libraries (e.g., httpx/httpcore)
    # from mutating the shared SSL context with different protocol settings.
    # If alpn_protocols is None, don't set ALPN (for libraries like aioimap).
    if alpn_protocols is not None:
        sslcontext.set_alpn_protocols(list(alpn_protocols))

    return sslcontext

@@ -103,63 +128,63 @@ def _create_client_context(
@cache
def _client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    # Cached version of _create_client_context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


# Create this only once and reuse it
_DEFAULT_SSL_CONTEXT = _client_context(SSLCipherList.PYTHON_DEFAULT)
_DEFAULT_NO_VERIFY_SSL_CONTEXT = _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT)
_NO_VERIFY_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context_no_verify(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context_no_verify(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context_no_verify(SSLCipherList.INSECURE),
}
_SSL_CONTEXTS = {
    SSLCipherList.INTERMEDIATE: _client_context(SSLCipherList.INTERMEDIATE),
    SSLCipherList.MODERN: _client_context(SSLCipherList.MODERN),
    SSLCipherList.INSECURE: _client_context(SSLCipherList.INSECURE),
}
# Pre-warm the cache for ALL SSL context configurations at module load time.
# This is critical because creating SSL contexts loads certificates from disk,
# which is blocking I/O that must not happen in the event loop.
_SSL_ALPN_PROTOCOLS = (SSL_ALPN_NONE, SSL_ALPN_HTTP11, SSL_ALPN_HTTP11_HTTP2)
for _cipher in SSLCipherList:
    for _alpn in _SSL_ALPN_PROTOCOLS:
        _client_context(_cipher, _alpn)
        _client_context_no_verify(_cipher, _alpn)


def get_default_context() -> ssl.SSLContext:
    """Return the default SSL context."""
    return _DEFAULT_SSL_CONTEXT
    return _client_context(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def get_default_no_verify_context() -> ssl.SSLContext:
    """Return the default SSL context that does not verify the server certificate."""
    return _DEFAULT_NO_VERIFY_SSL_CONTEXT
    return _client_context_no_verify(SSLCipherList.PYTHON_DEFAULT, SSL_ALPN_HTTP11)


def client_context_no_verify(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return a SSL context with no verification with a specific ssl cipher."""
    return _NO_VERIFY_SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_NO_VERIFY_SSL_CONTEXT)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context for making requests."""
    return _SSL_CONTEXTS.get(ssl_cipher_list, _DEFAULT_SSL_CONTEXT)
    return _client_context(ssl_cipher_list, alpn_protocols)


def create_client_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an independent SSL context for making requests."""
    # This explicitly uses the non-cached version to create a client context
    return _create_client_context(ssl_cipher_list)
    return _create_client_context(ssl_cipher_list, alpn_protocols)


def create_no_verify_ssl_context(
    ssl_cipher_list: SSLCipherList = SSLCipherList.PYTHON_DEFAULT,
    alpn_protocols: SSLALPNProtocols = SSL_ALPN_NONE,
) -> ssl.SSLContext:
    """Return an SSL context that does not verify the server certificate."""
    return _client_context_no_verify(ssl_cipher_list)
    return _client_context_no_verify(ssl_cipher_list, alpn_protocols)


def server_context_modern() -> ssl.SSLContext:

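The per-ALPN bucketing exists because ssl.SSLContext.set_alpn_protocols() mutates the context in place, so a context shared between HTTP/1.1 and HTTP/2 callers could be silently reconfigured. A standalone toy of the cached-per-bucket approach (not the module above) that can be run to observe the cache behavior:

import ssl
from functools import cache


@cache
def toy_client_context(alpn: tuple[str, ...] | None = None) -> ssl.SSLContext:
    """Toy version of the cached helper: one context per ALPN bucket."""
    ctx = ssl.create_default_context()
    if alpn is not None:
        # set_alpn_protocols() mutates the context in place, which is exactly
        # why shared contexts must be bucketed per ALPN configuration.
        ctx.set_alpn_protocols(list(alpn))
    return ctx


# Identical arguments hit the cache and return the same object...
assert toy_client_context(("http/1.1",)) is toy_client_context(("http/1.1",))
# ...while a different ALPN tuple gets its own, independent context.
assert toy_client_context(("http/1.1",)) is not toy_client_context(("http/1.1", "h2"))
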
@@ -188,6 +188,52 @@ class BaseUnitConverter:
        return (from_unit in cls._UNIT_INVERSES) != (to_unit in cls._UNIT_INVERSES)


class ApparentPowerConverter(BaseUnitConverter):
    """Utility to convert apparent power values."""

    UNIT_CLASS = "apparent_power"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfApparentPower.MILLIVOLT_AMPERE: 1 * 1000,
        UnitOfApparentPower.VOLT_AMPERE: 1,
        UnitOfApparentPower.KILO_VOLT_AMPERE: 1 / 1000,
    }
    VALID_UNITS = {
        UnitOfApparentPower.MILLIVOLT_AMPERE,
        UnitOfApparentPower.VOLT_AMPERE,
        UnitOfApparentPower.KILO_VOLT_AMPERE,
    }


class AreaConverter(BaseUnitConverter):
    """Utility to convert area values."""

    UNIT_CLASS = "area"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfArea.SQUARE_METERS: 1,
        UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2,
        UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2,
        UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2,
        UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2,
        UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2,
        UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2,
        UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2,
        UnitOfArea.ACRES: 1 / _ACRE_TO_M2,
        UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2,
    }
    VALID_UNITS = set(UnitOfArea)


class BloodGlucoseConcentrationConverter(BaseUnitConverter):
    """Utility to convert blood glucose concentration values."""

    UNIT_CLASS = "blood_glucose_concentration"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18,
        UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1,
    }
    VALID_UNITS = set(UnitOfBloodGlucoseConcentration)


class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
    """Convert carbon monoxide ratio to mass per volume.

@@ -213,36 +259,16 @@ class CarbonMonoxideConcentrationConverter(BaseUnitConverter):
    }


class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
    """Convert nitrogen dioxide ratio to mass per volume."""
class ConductivityConverter(BaseUnitConverter):
    """Utility to convert conductivity values."""

    UNIT_CLASS = "nitrogen_dioxide"
    UNIT_CLASS = "conductivity"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _NITROGEN_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class SulphurDioxideConcentrationConverter(BaseUnitConverter):
    """Convert sulphur dioxide ratio to mass per volume."""

    UNIT_CLASS = "sulphur_dioxide"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _SULPHUR_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        UnitOfConductivity.MICROSIEMENS_PER_CM: 1,
        UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3,
        UnitOfConductivity.SIEMENS_PER_CM: 1e-6,
    }
    VALID_UNITS = set(UnitOfConductivity)


class DataRateConverter(BaseUnitConverter):
@@ -266,25 +292,6 @@ class DataRateConverter(BaseUnitConverter):
    VALID_UNITS = set(UnitOfDataRate)


class AreaConverter(BaseUnitConverter):
    """Utility to convert area values."""

    UNIT_CLASS = "area"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfArea.SQUARE_METERS: 1,
        UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2,
        UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2,
        UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2,
        UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2,
        UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2,
        UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2,
        UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2,
        UnitOfArea.ACRES: 1 / _ACRE_TO_M2,
        UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2,
    }
    VALID_UNITS = set(UnitOfArea)


class DistanceConverter(BaseUnitConverter):
    """Utility to convert distance values."""

@@ -313,27 +320,28 @@ class DistanceConverter(BaseUnitConverter):
    }


class BloodGlucoseConcentrationConverter(BaseUnitConverter):
    """Utility to convert blood glucose concentration values."""
class DurationConverter(BaseUnitConverter):
    """Utility to convert duration values."""

    UNIT_CLASS = "blood_glucose_concentration"
    UNIT_CLASS = "duration"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18,
        UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1,
        UnitOfTime.MICROSECONDS: 1000000,
        UnitOfTime.MILLISECONDS: 1000,
        UnitOfTime.SECONDS: 1,
        UnitOfTime.MINUTES: 1 / _MIN_TO_SEC,
        UnitOfTime.HOURS: 1 / _HRS_TO_SECS,
        UnitOfTime.DAYS: 1 / _DAYS_TO_SECS,
        UnitOfTime.WEEKS: 1 / (7 * _DAYS_TO_SECS),
    }
    VALID_UNITS = set(UnitOfBloodGlucoseConcentration)


class ConductivityConverter(BaseUnitConverter):
    """Utility to convert conductivity values."""

    UNIT_CLASS = "conductivity"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfConductivity.MICROSIEMENS_PER_CM: 1,
        UnitOfConductivity.MILLISIEMENS_PER_CM: 1e-3,
        UnitOfConductivity.SIEMENS_PER_CM: 1e-6,
    VALID_UNITS = {
        UnitOfTime.MICROSECONDS,
        UnitOfTime.MILLISECONDS,
        UnitOfTime.SECONDS,
        UnitOfTime.MINUTES,
        UnitOfTime.HOURS,
        UnitOfTime.DAYS,
        UnitOfTime.WEEKS,
    }
    VALID_UNITS = set(UnitOfConductivity)


class ElectricCurrentConverter(BaseUnitConverter):
@@ -462,19 +470,51 @@ class MassConverter(BaseUnitConverter):
    }


class ApparentPowerConverter(BaseUnitConverter):
    """Utility to convert apparent power values."""
class MassVolumeConcentrationConverter(BaseUnitConverter):
    """Utility to convert mass volume concentration values."""

    UNIT_CLASS = "apparent_power"
    UNIT_CLASS = "concentration"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfApparentPower.MILLIVOLT_AMPERE: 1 * 1000,
        UnitOfApparentPower.VOLT_AMPERE: 1,
        UnitOfApparentPower.KILO_VOLT_AMPERE: 1 / 1000,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1000000.0,  # 1000 µg/m³ = 1 mg/m³
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 1000.0,  # 1000 mg/m³ = 1 g/m³
        CONCENTRATION_GRAMS_PER_CUBIC_METER: 1.0,
    }
    VALID_UNITS = {
        UnitOfApparentPower.MILLIVOLT_AMPERE,
        UnitOfApparentPower.VOLT_AMPERE,
        UnitOfApparentPower.KILO_VOLT_AMPERE,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
        CONCENTRATION_GRAMS_PER_CUBIC_METER,
    }


class NitrogenDioxideConcentrationConverter(BaseUnitConverter):
    """Convert nitrogen dioxide ratio to mass per volume."""

    UNIT_CLASS = "nitrogen_dioxide"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _NITROGEN_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class OzoneConcentrationConverter(BaseUnitConverter):
    """Convert ozone ratio to mass per volume."""

    UNIT_CLASS = "ozone"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


@@ -563,22 +603,6 @@ class ReactivePowerConverter(BaseUnitConverter):
    }


class OzoneConcentrationConverter(BaseUnitConverter):
    """Convert ozone ratio to mass per volume."""

    UNIT_CLASS = "ozone"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class SpeedConverter(BaseUnitConverter):
    """Utility to convert speed values."""

@@ -679,6 +703,22 @@ class SpeedConverter(BaseUnitConverter):
        return float(0.836 * beaufort ** (3 / 2))


class SulphurDioxideConcentrationConverter(BaseUnitConverter):
    """Convert sulphur dioxide ratio to mass per volume."""

    UNIT_CLASS = "sulphur_dioxide"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_PARTS_PER_BILLION: 1e9,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
            _SULPHUR_DIOXIDE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
        ),
    }
    VALID_UNITS = {
        CONCENTRATION_PARTS_PER_BILLION,
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    }


class TemperatureConverter(BaseUnitConverter):
    """Utility to convert temperature values."""

@@ -849,22 +889,6 @@ class UnitlessRatioConverter(BaseUnitConverter):
    }


class MassVolumeConcentrationConverter(BaseUnitConverter):
    """Utility to convert mass volume concentration values."""

    UNIT_CLASS = "concentration"
    _UNIT_CONVERSION: dict[str | None, float] = {
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: 1000000.0,  # 1000 µg/m³ = 1 mg/m³
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: 1000.0,  # 1000 mg/m³ = 1 g/m³
        CONCENTRATION_GRAMS_PER_CUBIC_METER: 1.0,
    }
    VALID_UNITS = {
        CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
        CONCENTRATION_GRAMS_PER_CUBIC_METER,
    }


class VolumeConverter(BaseUnitConverter):
    """Utility to convert volume values."""

@@ -927,27 +951,3 @@ class VolumeFlowRateConverter(BaseUnitConverter):
        UnitOfVolumeFlowRate.GALLONS_PER_DAY,
        UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
    }


class DurationConverter(BaseUnitConverter):
    """Utility to convert duration values."""

    UNIT_CLASS = "duration"
    _UNIT_CONVERSION: dict[str | None, float] = {
        UnitOfTime.MICROSECONDS: 1000000,
        UnitOfTime.MILLISECONDS: 1000,
        UnitOfTime.SECONDS: 1,
        UnitOfTime.MINUTES: 1 / _MIN_TO_SEC,
        UnitOfTime.HOURS: 1 / _HRS_TO_SECS,
        UnitOfTime.DAYS: 1 / _DAYS_TO_SECS,
        UnitOfTime.WEEKS: 1 / (7 * _DAYS_TO_SECS),
    }
    VALID_UNITS = {
        UnitOfTime.MICROSECONDS,
        UnitOfTime.MILLISECONDS,
        UnitOfTime.SECONDS,
        UnitOfTime.MINUTES,
        UnitOfTime.HOURS,
        UnitOfTime.DAYS,
        UnitOfTime.WEEKS,
    }

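In these tables each factor is the number of the given unit per base unit (seconds for duration, volt-amperes for apparent power), so a conversion divides by the source factor and multiplies by the target factor. A tiny self-contained sketch of that rule using the duration factors:

# Toy converter mirroring the factor tables above: each factor is the
# number of that unit per base unit (seconds for duration).
FACTORS = {
    "us": 1_000_000,
    "ms": 1_000,
    "s": 1,
    "min": 1 / 60,
    "h": 1 / 3600,
}


def convert(value: float, from_unit: str, to_unit: str) -> float:
    # Normalize to the base unit, then scale to the target unit.
    return value / FACTORS[from_unit] * FACTORS[to_unit]


assert convert(120, "s", "min") == 2.0
assert convert(1.5, "h", "min") == 90.0
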
@@ -676,7 +676,7 @@ exclude_lines = [
]

[tool.ruff]
required-version = ">=0.13.0"
required-version = ">=0.14.13"

[tool.ruff.lint]
select = [

requirements_all.txt (generated, 12 changes)
@@ -334,7 +334,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0

# homeassistant.components.ntfy
aiontfy==0.6.1
aiontfy==0.7.0

# homeassistant.components.nut
aionut==4.3.4
@@ -1104,7 +1104,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1

# homeassistant.components.google_air_quality
google_air_quality_api==2.1.2
google_air_quality_api==3.0.0

# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1184,7 +1184,7 @@ hassil==3.5.0
hdate[astral]==1.1.2

# homeassistant.components.hdfury
hdfury==1.3.1
hdfury==1.4.2

# homeassistant.components.heatmiser
heatmiserV3==2.0.4
@@ -1281,7 +1281,7 @@ ihcsdk==2.8.5
imeon_inverter_api==0.4.0

# homeassistant.components.imgw_pib
imgw_pib==1.6.0
imgw_pib==2.0.1

# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -1909,7 +1909,7 @@ pyatag==0.3.5.3
pyatmo==9.2.3

# homeassistant.components.apple_tv
pyatv==0.16.1;python_version<'3.14'
pyatv==0.17.0

# homeassistant.components.aussie_broadband
pyaussiebb==0.1.5
@@ -3080,7 +3080,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1

# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==10.0.1

# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

requirements_test_all.txt (generated, 12 changes)
@@ -319,7 +319,7 @@ aionanoleaf==0.2.1
aionotion==2024.03.0

# homeassistant.components.ntfy
aiontfy==0.6.1
aiontfy==0.7.0

# homeassistant.components.nut
aionut==4.3.4
@@ -980,7 +980,7 @@ google-nest-sdm==9.1.2
google-photos-library-api==0.12.1

# homeassistant.components.google_air_quality
google_air_quality_api==2.1.2
google_air_quality_api==3.0.0

# homeassistant.components.slide
# homeassistant.components.slide_local
@@ -1051,7 +1051,7 @@ hassil==3.5.0
hdate[astral]==1.1.2

# homeassistant.components.hdfury
hdfury==1.3.1
hdfury==1.4.2

# homeassistant.components.here_travel_time
here-routing==1.2.0
@@ -1127,7 +1127,7 @@ igloohome-api==0.1.1
imeon_inverter_api==0.4.0

# homeassistant.components.imgw_pib
imgw_pib==1.6.0
imgw_pib==2.0.1

# homeassistant.components.incomfort
incomfort-client==0.6.11
@@ -1637,7 +1637,7 @@ pyatag==0.3.5.3
pyatmo==9.2.3

# homeassistant.components.apple_tv
pyatv==0.16.1;python_version<'3.14'
pyatv==0.17.0

# homeassistant.components.aussie_broadband
pyaussiebb==0.1.5
@@ -2577,7 +2577,7 @@ typedmonarchmoney==0.4.4
uasiren==0.0.1

# homeassistant.components.unifiprotect
uiprotect==10.0.0
uiprotect==10.0.1

# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7

requirements_test_pre_commit.txt (generated, 2 changes)
@@ -1,5 +1,5 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

codespell==2.4.1
ruff==0.13.0
ruff==0.14.13
yamllint==1.37.1

script/hassfest/docker/Dockerfile (generated, 2 changes)
@@ -26,7 +26,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.9.17,source=/uv,target=/bin/uv \
    -r /usr/src/homeassistant/requirements.txt \
    pipdeptree==2.26.1 \
    tqdm==4.67.1 \
    ruff==0.13.0
    ruff==0.14.13

LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"

@@ -117,7 +117,6 @@ FORBIDDEN_PACKAGE_EXCEPTIONS: dict[str, dict[str, set[str]]] = {
    "airthings": {"airthings-cloud": {"async-timeout"}},
    "ampio": {"asmog": {"async-timeout"}},
    "apache_kafka": {"aiokafka": {"async-timeout"}},
    "apple_tv": {"pyatv": {"async-timeout"}},
    "blackbird": {
        # https://github.com/koolsb/pyblackbird/issues/12
        # pyblackbird > pyserial-asyncio

@@ -1,9 +1,6 @@
"""Tests for Apple TV."""

import sys

import pytest

if sys.version_info < (3, 14):
    # Make asserts in the common module display differences
    pytest.register_assert_rewrite("tests.components.apple_tv.common")
# Make asserts in the common module display differences
pytest.register_assert_rewrite("tests.components.apple_tv.common")

@@ -1,20 +1,14 @@
"""Fixtures for component."""

from collections.abc import Generator
import sys
from unittest.mock import AsyncMock, MagicMock, patch

from pyatv import conf
from pyatv.const import PairingRequirement, Protocol
from pyatv.support import http
import pytest

if sys.version_info < (3, 14):
    from pyatv import conf
    from pyatv.const import PairingRequirement, Protocol
    from pyatv.support import http

    from .common import MockPairingHandler, airplay_service, create_conf, mrp_service

if sys.version_info >= (3, 14):
    collect_ignore_glob = ["test_*.py"]
from .common import MockPairingHandler, airplay_service, create_conf, mrp_service


@pytest.fixture(autouse=True, name="mock_scan")

@@ -27,7 +27,10 @@ def mock_setup_entry() -> Generator[AsyncMock]:
def mock_config_entry(hass: HomeAssistant, mock_arve: MagicMock) -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="Arve", domain=DOMAIN, data=USER_INPUT, unique_id=mock_arve.customer_id
        title="Arve",
        domain=DOMAIN,
        data=USER_INPUT,
        unique_id=str(mock_arve.customer_id),
    )

@@ -34,7 +34,7 @@ async def test_correct_flow(
    assert result2["type"] is FlowResultType.CREATE_ENTRY
    assert result2["data"] == USER_INPUT
    assert len(mock_setup_entry.mock_calls) == 1
    assert result2["result"].unique_id == 12345
    assert result2["result"].unique_id == "12345"


async def test_form_cannot_connect(

tests/components/arve/test_init.py (new file, 26 lines)
@@ -0,0 +1,26 @@
"""Tests for the Arve component."""

from unittest.mock import patch

from homeassistant.components.arve.const import DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
    """Test migrating a 1.1 config entry to 1.2."""
    with patch("homeassistant.components.arve.async_setup_entry", return_value=True):
        entry = MockConfigEntry(
            domain=DOMAIN,
            data={CONF_ACCESS_TOKEN: "mock", CONF_CLIENT_SECRET: "mock"},
            version=1,
            minor_version=1,
            unique_id=12345,
        )
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
        assert entry.version == 1
        assert entry.minor_version == 2
        assert entry.unique_id == "12345"

@@ -1 +1,13 @@
"""Tests for the GitHub integration."""

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
    """Method for setting up the component."""
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

@@ -1,53 +0,0 @@
"""Common helpers for GitHub integration tests."""

from __future__ import annotations

import json

from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry, async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker

MOCK_ACCESS_TOKEN = "gho_16C7e42F292c6912E7710c838347Ae178B4a"
TEST_REPOSITORY = "octocat/Hello-World"


async def setup_github_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    add_entry_to_hass: bool = True,
) -> None:
    """Mock setting up the integration."""
    headers = json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN))
    for idx, repository in enumerate(mock_config_entry.options[CONF_REPOSITORIES]):
        aioclient_mock.get(
            f"https://api.github.com/repos/{repository}",
            json={
                **json.loads(await async_load_fixture(hass, "repository.json", DOMAIN)),
                "full_name": repository,
                "id": idx,
            },
            headers=headers,
        )
        aioclient_mock.get(
            f"https://api.github.com/repos/{repository}/events",
            json=[],
            headers=headers,
        )
    aioclient_mock.post(
        "https://api.github.com/graphql",
        json=json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN)),
        headers=headers,
    )
    if add_entry_to_hass:
        mock_config_entry.add_to_hass(hass)

    setup_result = await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    assert setup_result
    assert mock_config_entry.state is ConfigEntryState.LOADED

@@ -1,18 +1,27 @@
"""conftest for the GitHub integration."""

import asyncio
from collections.abc import Generator
from unittest.mock import patch
from unittest.mock import AsyncMock, MagicMock, patch

from aiogithubapi import (
    GitHubLoginDeviceModel,
    GitHubLoginOauthModel,
    GitHubRateLimitModel,
)
import pytest

from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant

from .common import MOCK_ACCESS_TOKEN, TEST_REPOSITORY, setup_github_integration
from .const import MOCK_ACCESS_TOKEN, TEST_REPOSITORY

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.common import (
    MockConfigEntry,
    async_load_json_object_fixture,
    load_json_object_fixture,
)


@pytest.fixture
@@ -34,11 +43,93 @@ def mock_setup_entry() -> Generator[None]:


@pytest.fixture
async def init_integration(
def device_activation_event() -> asyncio.Event:
    """Fixture to provide an asyncio event for device activation."""
    return asyncio.Event()


@pytest.fixture
def github_device_client(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
) -> MockConfigEntry:
    """Set up the GitHub integration for testing."""
    await setup_github_integration(hass, mock_config_entry, aioclient_mock)
    return mock_config_entry
    device_activation_event: asyncio.Event,
) -> Generator[AsyncMock]:
    """Mock GitHub device client."""
    with patch(
        "homeassistant.components.github.config_flow.GitHubDeviceAPI",
        autospec=True,
    ) as github_client_mock:
        client = github_client_mock.return_value
        register_object = AsyncMock()
        register_object.data = GitHubLoginDeviceModel(
            load_json_object_fixture("device_register.json", DOMAIN)
        )
        client.register.return_value = register_object

        async def mock_api_device_activation(device_code) -> AsyncMock:
            # Simulate the device activation process
            await device_activation_event.wait()
            activate_object = AsyncMock()
            activate_object.data = GitHubLoginOauthModel(
                await async_load_json_object_fixture(
                    hass, "device_activate.json", DOMAIN
                )
            )
            return activate_object

        client.activation = mock_api_device_activation
        yield client


@pytest.fixture
def github_client(hass: HomeAssistant) -> Generator[AsyncMock]:
    """Mock GitHub device client."""
    with (
        patch(
            "homeassistant.components.github.config_flow.GitHubAPI",
            autospec=True,
        ) as github_client_mock,
        patch("homeassistant.components.github.GitHubAPI", new=github_client_mock),
        patch(
            "homeassistant.components.github.diagnostics.GitHubAPI",
            new=github_client_mock,
        ),
    ):
        client = github_client_mock.return_value
        client.user.starred = AsyncMock(
            side_effect=[
                MagicMock(
                    is_last_page=False,
                    next_page_number=2,
                    last_page_number=2,
                    data=[MagicMock(full_name="home-assistant/core")],
                ),
                MagicMock(
                    is_last_page=True,
                    data=[MagicMock(full_name="home-assistant/frontend")],
                ),
            ]
        )
        client.user.repos = AsyncMock(
            side_effect=[
                MagicMock(
                    is_last_page=False,
                    next_page_number=2,
                    last_page_number=2,
                    data=[MagicMock(full_name="home-assistant/operating-system")],
                ),
                MagicMock(
                    is_last_page=True,
                    data=[MagicMock(full_name="esphome/esphome")],
                ),
            ]
        )
        rate_limit_mock = AsyncMock()
        rate_limit_mock.data = GitHubRateLimitModel(
            load_json_object_fixture("rate_limit.json", DOMAIN)
        )
        client.rate_limit.return_value = rate_limit_mock
        graphql_mock = AsyncMock()
        graphql_mock.data = load_json_object_fixture("graphql.json", DOMAIN)
        client.graphql.return_value = graphql_mock
        client.repos.events.subscribe = AsyncMock()
        yield client

tests/components/github/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Constants for GitHub integration tests."""

MOCK_ACCESS_TOKEN = "gho_16C7e42F292c6912E7710c838347Ae178B4a"
TEST_REPOSITORY = "octocat/Hello-World"

@@ -1,29 +0,0 @@
{
  "Server": "GitHub.com",
  "Date": "Mon, 1 Jan 1970 00:00:00 GMT",
  "Content-Type": "application/json; charset=utf-8",
  "Transfer-Encoding": "chunked",
  "Cache-Control": "private, max-age=60, s-maxage=60",
  "Vary": "Accept, Authorization, Cookie, X-GitHub-OTP",
  "Etag": "W/\"1234567890abcdefghijklmnopqrstuvwxyz\"",
  "X-OAuth-Scopes": "",
  "X-Accepted-OAuth-Scopes": "",
  "github-authentication-token-expiration": "1970-01-01 01:00:00 UTC",
  "X-GitHub-Media-Type": "github.v3; param=raw; format=json",
  "X-RateLimit-Limit": "5000",
  "X-RateLimit-Remaining": "4999",
  "X-RateLimit-Reset": "1",
  "X-RateLimit-Used": "1",
  "X-RateLimit-Resource": "core",
  "Access-Control-Expose-Headers": "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, Deprecation, Sunset",
  "Access-Control-Allow-Origin": "*",
  "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload",
  "X-Frame-Options": "deny",
  "X-Content-Type-Options": "nosniff",
  "X-XSS-Protection": "0",
  "Referrer-Policy": "origin-when-cross-origin, strict-origin-when-cross-origin",
  "Content-Security-Policy": "default-src 'none'",
  "Content-Encoding": "gzip",
  "Permissions-Policy": "",
  "X-GitHub-Request-Id": "12A3:45BC:6D7890:12EF34:5678G901"
}

tests/components/github/fixtures/device_activate.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "access_token": "gho_16C7e42F292c6912E7710c838347Ae178B4a",
  "token_type": "bearer",
  "scope": ""
}

tests/components/github/fixtures/device_register.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
  "user_code": "WDJB-MJHT",
  "verification_uri": "https://github.com/login/device",
  "expires_in": 900,
  "interval": 5
}

tests/components/github/fixtures/rate_limit.json (new file, 1 line)
@@ -0,0 +1 @@
{ "resources": { "core": { "remaining": 100, "limit": 100 } } }

@@ -1,146 +1,100 @@
"""Test the GitHub config flow."""

from unittest.mock import AsyncMock, MagicMock, patch
import asyncio
from unittest.mock import AsyncMock, MagicMock

from aiogithubapi import GitHubException
from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant import config_entries
from homeassistant.components.github.config_flow import get_repositories
from homeassistant.components.github.const import (
    CONF_REPOSITORIES,
    DEFAULT_REPOSITORIES,
    DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType, UnknownFlow

from .common import MOCK_ACCESS_TOKEN
from .const import MOCK_ACCESS_TOKEN

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker


async def test_full_user_flow_implementation(
    hass: HomeAssistant,
    mock_setup_entry: None,
    aioclient_mock: AiohttpClientMocker,
    freezer: FrozenDateTimeFactory,
    github_device_client: AsyncMock,
    github_client: AsyncMock,
    device_activation_event: asyncio.Event,
) -> None:
    """Test the full manual user flow from start to finish."""
    aioclient_mock.post(
        "https://github.com/login/device/code",
        json={
            "device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
            "user_code": "WDJB-MJHT",
            "verification_uri": "https://github.com/login/device",
            "expires_in": 900,
            "interval": 5,
        },
        headers={"Content-Type": "application/json"},
    )
    # User has not yet entered the code
    aioclient_mock.post(
        "https://github.com/login/oauth/access_token",
        json={"error": "authorization_pending"},
        headers={"Content-Type": "application/json"},
    )

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        DOMAIN, context={"source": SOURCE_USER}
    )

    assert result["step_id"] == "device"
    assert result["type"] is FlowResultType.SHOW_PROGRESS

    # User enters the code
    aioclient_mock.clear_requests()
    aioclient_mock.post(
        "https://github.com/login/oauth/access_token",
        json={
            CONF_ACCESS_TOKEN: MOCK_ACCESS_TOKEN,
            "token_type": "bearer",
            "scope": "",
        },
        headers={"Content-Type": "application/json"},
    )
    freezer.tick(10)
    device_activation_event.set()
    await hass.async_block_till_done()

    result = await hass.config_entries.flow.async_configure(result["flow_id"])

    assert result["step_id"] == "repositories"
    assert result["type"] is FlowResultType.FORM
    assert not result["errors"]

    schema = result["data_schema"]
    repositories = schema.schema[CONF_REPOSITORIES].options
    assert len(repositories) == 4

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={
            CONF_REPOSITORIES: DEFAULT_REPOSITORIES,
        },
        result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
    )

    assert result["title"] == ""
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert "data" in result
    assert result["data"][CONF_ACCESS_TOKEN] == MOCK_ACCESS_TOKEN
    assert "options" in result
    assert result["options"][CONF_REPOSITORIES] == DEFAULT_REPOSITORIES
    assert result["data"] == {CONF_ACCESS_TOKEN: MOCK_ACCESS_TOKEN}
    assert result["options"] == {CONF_REPOSITORIES: DEFAULT_REPOSITORIES}


async def test_flow_with_registration_failure(
    hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
github_device_client: AsyncMock,
|
||||
) -> None:
|
||||
"""Test flow with registration failure of the device."""
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/device/code",
|
||||
exc=GitHubException("Registration failed"),
|
||||
)
|
||||
github_device_client.register.side_effect = GitHubException("Registration failed")
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_USER},
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result.get("reason") == "could_not_register"
|
||||
assert result["reason"] == "could_not_register"
|
||||
|
||||
|
||||
async def test_flow_with_activation_failure(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
github_device_client: AsyncMock,
|
||||
device_activation_event: asyncio.Event,
|
||||
) -> None:
|
||||
"""Test flow with activation failure of the device."""
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/device/code",
|
||||
json={
|
||||
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
|
||||
"user_code": "WDJB-MJHT",
|
||||
"verification_uri": "https://github.com/login/device",
|
||||
"expires_in": 900,
|
||||
"interval": 5,
|
||||
},
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
# User has not yet entered the code
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/oauth/access_token",
|
||||
json={"error": "authorization_pending"},
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
async def mock_api_device_activation(device_code) -> None:
|
||||
# Simulate the device activation process
|
||||
await device_activation_event.wait()
|
||||
raise GitHubException("Activation failed")
|
||||
|
||||
github_device_client.activation = mock_api_device_activation
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_USER},
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["step_id"] == "device"
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
|
||||
# Activation fails
|
||||
aioclient_mock.clear_requests()
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/oauth/access_token",
|
||||
exc=GitHubException("Activation failed"),
|
||||
)
|
||||
freezer.tick(10)
|
||||
device_activation_event.set()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
@@ -149,30 +103,14 @@ async def test_flow_with_activation_failure(
|
||||
|
||||
|
||||
async def test_flow_with_remove_while_activating(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
hass: HomeAssistant, github_device_client: AsyncMock
|
||||
) -> None:
|
||||
"""Test flow with user canceling while activating."""
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/device/code",
|
||||
json={
|
||||
"device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
|
||||
"user_code": "WDJB-MJHT",
|
||||
"verification_uri": "https://github.com/login/device",
|
||||
"expires_in": 900,
|
||||
"interval": 5,
|
||||
},
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
aioclient_mock.post(
|
||||
"https://github.com/login/oauth/access_token",
|
||||
json={"error": "authorization_pending"},
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_USER},
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["step_id"] == "device"
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
|
||||
@@ -194,84 +132,88 @@ async def test_already_configured(
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_USER},
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result.get("reason") == "already_configured"
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_starred_pagination_with_paginated_result(hass: HomeAssistant) -> None:
|
||||
"""Test pagination of starred repositories with paginated result."""
|
||||
with patch(
|
||||
"homeassistant.components.github.config_flow.GitHubAPI",
|
||||
return_value=MagicMock(
|
||||
user=MagicMock(
|
||||
starred=AsyncMock(
|
||||
return_value=MagicMock(
|
||||
is_last_page=False,
|
||||
next_page_number=2,
|
||||
last_page_number=2,
|
||||
data=[MagicMock(full_name="home-assistant/core")],
|
||||
)
|
||||
),
|
||||
repos=AsyncMock(
|
||||
return_value=MagicMock(
|
||||
is_last_page=False,
|
||||
next_page_number=2,
|
||||
last_page_number=2,
|
||||
data=[MagicMock(full_name="awesome/reposiotry")],
|
||||
)
|
||||
),
|
||||
)
|
||||
),
|
||||
):
|
||||
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
|
||||
async def test_no_repositories(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: None,
|
||||
github_device_client: AsyncMock,
|
||||
github_client: AsyncMock,
|
||||
device_activation_event: asyncio.Event,
|
||||
) -> None:
|
||||
"""Test the full manual user flow from start to finish."""
|
||||
|
||||
assert len(repos) == 2
|
||||
assert repos[-1] == DEFAULT_REPOSITORIES[0]
|
||||
github_client.user.repos.side_effect = [MagicMock(is_last_page=True, data=[])]
|
||||
github_client.user.starred.side_effect = [MagicMock(is_last_page=True, data=[])]
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["step_id"] == "device"
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
|
||||
device_activation_event.set()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert result["step_id"] == "repositories"
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert not result["errors"]
|
||||
|
||||
schema = result["data_schema"]
|
||||
repositories = schema.schema[CONF_REPOSITORIES].options
|
||||
assert len(repositories) == 2
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
|
||||
|
||||
async def test_starred_pagination_with_no_starred(hass: HomeAssistant) -> None:
|
||||
"""Test pagination of starred repositories with no starred."""
|
||||
with patch(
|
||||
"homeassistant.components.github.config_flow.GitHubAPI",
|
||||
return_value=MagicMock(
|
||||
user=MagicMock(
|
||||
starred=AsyncMock(
|
||||
return_value=MagicMock(
|
||||
is_last_page=True,
|
||||
data=[],
|
||||
)
|
||||
),
|
||||
repos=AsyncMock(
|
||||
return_value=MagicMock(
|
||||
is_last_page=True,
|
||||
data=[],
|
||||
)
|
||||
),
|
||||
)
|
||||
),
|
||||
):
|
||||
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
|
||||
async def test_exception_during_repository_fetch(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: None,
|
||||
github_device_client: AsyncMock,
|
||||
github_client: AsyncMock,
|
||||
device_activation_event: asyncio.Event,
|
||||
) -> None:
|
||||
"""Test the full manual user flow from start to finish."""
|
||||
|
||||
assert len(repos) == 2
|
||||
assert repos == DEFAULT_REPOSITORIES
|
||||
github_client.user.repos.side_effect = GitHubException()
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
async def test_starred_pagination_with_exception(hass: HomeAssistant) -> None:
|
||||
"""Test pagination of starred repositories with exception."""
|
||||
with patch(
|
||||
"homeassistant.components.github.config_flow.GitHubAPI",
|
||||
return_value=MagicMock(
|
||||
user=MagicMock(starred=AsyncMock(side_effect=GitHubException("Error")))
|
||||
),
|
||||
):
|
||||
repos = await get_repositories(hass, MOCK_ACCESS_TOKEN)
|
||||
assert result["step_id"] == "device"
|
||||
assert result["type"] is FlowResultType.SHOW_PROGRESS
|
||||
|
||||
assert len(repos) == 2
|
||||
assert repos == DEFAULT_REPOSITORIES
|
||||
device_activation_event.set()
|
||||
await hass.async_block_till_done()
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert result["step_id"] == "repositories"
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert not result["errors"]
|
||||
|
||||
schema = result["data_schema"]
|
||||
repositories = schema.schema[CONF_REPOSITORIES].options
|
||||
assert len(repositories) == 2
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_REPOSITORIES: DEFAULT_REPOSITORIES}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
|
||||
|
||||
async def test_options_flow(
|
||||
|
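Several of the tests above rely on fixtures defined elsewhere in the PR (github_device_client, github_client, device_activation_event). A plausible sketch of the event fixture and of how the device client's activation is wired to it; the names match their use above, but the shapes are assumptions:

import asyncio
from unittest.mock import AsyncMock, MagicMock

import pytest


@pytest.fixture
def device_activation_event() -> asyncio.Event:
    """Event a test sets to release the mocked device activation (assumed)."""
    return asyncio.Event()


@pytest.fixture
def github_device_client(device_activation_event: asyncio.Event) -> AsyncMock:
    """Mocked device-login client (shape assumed from its use above)."""
    client = AsyncMock()
    register_response = AsyncMock()
    register_response.data = MagicMock(
        device_code="3584d83530557fdd1f46af8289938c8ef79f9dc5",
        user_code="WDJB-MJHT",
        verification_uri="https://github.com/login/device",
        expires_in=900,
        interval=5,
    )
    client.register.return_value = register_response

    async def activation(device_code: str) -> MagicMock:
        # Block until the test fires the event, like a user entering the code.
        await device_activation_event.wait()
        return MagicMock(data=MagicMock(access_token=MOCK_ACCESS_TOKEN))

    client.activation = activation
    return client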
@@ -1,89 +1,56 @@
"""Test GitHub diagnostics."""

import json
from unittest.mock import AsyncMock

from aiogithubapi import GitHubException
import pytest

from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.core import HomeAssistant

from .common import setup_github_integration
from . import setup_integration

from tests.common import MockConfigEntry, async_load_fixture
from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    github_client: AsyncMock,
) -> None:
    """Test config entry diagnostics."""
    mock_config_entry.add_to_hass(hass)
    hass.config_entries.async_update_entry(
        mock_config_entry,
        options={CONF_REPOSITORIES: ["home-assistant/core"]},
    )
    response_json = json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN))
    response_json["data"]["repository"]["full_name"] = "home-assistant/core"

    aioclient_mock.post(
        "https://api.github.com/graphql",
        json=response_json,
        headers=json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN)),
    )
    aioclient_mock.get(
        "https://api.github.com/rate_limit",
        json={"resources": {"core": {"remaining": 100, "limit": 100}}},
        headers={"Content-Type": "application/json"},
    )

    await setup_github_integration(
        hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
    )
    await setup_integration(hass, mock_config_entry)
    result = await get_diagnostics_for_config_entry(
        hass,
        hass_client,
        mock_config_entry,
    )

    assert result["options"]["repositories"] == ["home-assistant/core"]
    assert result["options"]["repositories"] == ["octocat/Hello-World"]
    assert result["rate_limit"] == {
        "resources": {"core": {"remaining": 100, "limit": 100}}
    }
    assert (
        result["repositories"]["home-assistant/core"]["full_name"]
        == "home-assistant/core"
        result["repositories"]["octocat/Hello-World"]["full_name"]
        == "octocat/Hello-World"
    )


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics_exception(
    hass: HomeAssistant,
    hass_client: ClientSessionGenerator,
    init_integration: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    mock_config_entry: MockConfigEntry,
    github_client: AsyncMock,
) -> None:
    """Test config entry diagnostics with an exception for the rate limit."""
    aioclient_mock.get(
        "https://api.github.com/rate_limit",
        exc=GitHubException("error"),
    )
    await setup_integration(hass, mock_config_entry)
    github_client.rate_limit.side_effect = GitHubException("error")

    result = await get_diagnostics_for_config_entry(
        hass,
        hass_client,
        init_integration,
        mock_config_entry,
    )

    assert (
        result["rate_limit"]["error"]
        == "Unexpected exception for 'https://api.github.com/rate_limit' with - error"
    )
    assert result["rate_limit"]["error"] == "error"

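The changed assertion (the error string shrinks to just "error") suggests the diagnostics handler stores the stringified exception directly instead of an aiohttp-level message. A sketch of the shape those assertions imply; this is an illustration, not the integration's actual code, and client acquisition plus the repositories payload are elided:

from aiogithubapi import GitHubException


async def async_get_config_entry_diagnostics(hass, entry) -> dict:
    """Sketch: diagnostics that degrade gracefully when the rate-limit call fails."""
    client = ...  # the integration's aiogithubapi client, acquisition elided
    try:
        response = await client.rate_limit()
        rate_limit = response.data  # serialized to plain data for the report
    except GitHubException as err:
        rate_limit = {"error": str(err)}
    return {
        "options": dict(entry.options),
        "rate_limit": rate_limit,
        "repositories": {},  # per-repository coordinator data, elided
    }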
@@ -1,24 +1,23 @@
"""Test the GitHub init file."""

from unittest.mock import AsyncMock

import pytest

from homeassistant.components.github import CONF_REPOSITORIES
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er, icon

from .common import setup_github_integration
from . import setup_integration

from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_cleanup(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    github_client: AsyncMock,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that we remove untracked repositories from the device registry."""
@@ -27,9 +26,7 @@ async def test_device_registry_cleanup(
        mock_config_entry,
        options={CONF_REPOSITORIES: ["home-assistant/core"]},
    )
    await setup_github_integration(
        hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
    )
    await setup_integration(hass, mock_config_entry)

    devices = dr.async_entries_for_config_entry(
        registry=device_registry,
@@ -58,12 +55,10 @@ async def test_device_registry_cleanup(
    assert len(devices) == 0


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    github_client: AsyncMock,
) -> None:
    """Test that we setup event subscription."""
    mock_config_entry.add_to_hass(hass)
@@ -72,21 +67,14 @@ async def test_subscription_setup(
        options={CONF_REPOSITORIES: ["home-assistant/core"]},
        pref_disable_polling=False,
    )
    await setup_github_integration(
        hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
    )
    assert (
        "https://api.github.com/repos/home-assistant/core/events" in x[1]
        for x in aioclient_mock.mock_calls
    )
    await setup_integration(hass, mock_config_entry)
    github_client.repos.events.subscribe.assert_called_once()


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup_polling_disabled(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    github_client: AsyncMock,
) -> None:
    """Test that we do not setup event subscription if polling is disabled."""
    mock_config_entry.add_to_hass(hass)
@@ -95,13 +83,8 @@ async def test_subscription_setup_polling_disabled(
        options={CONF_REPOSITORIES: ["home-assistant/core"]},
        pref_disable_polling=True,
    )
    await setup_github_integration(
        hass, mock_config_entry, aioclient_mock, add_entry_to_hass=False
    )
    assert (
        "https://api.github.com/repos/home-assistant/core/events" not in x[1]
        for x in aioclient_mock.mock_calls
    )
    await setup_integration(hass, mock_config_entry)
    github_client.repos.events.subscribe.assert_not_called()

    # Prove that we subscribed if the user enabled polling again
    hass.config_entries.async_update_entry(
@@ -109,23 +92,20 @@ async def test_subscription_setup_polling_disabled(
    )
    assert await hass.config_entries.async_reload(mock_config_entry.entry_id)
    await hass.async_block_till_done()
    assert (
        "https://api.github.com/repos/home-assistant/core/events" in x[1]
        for x in aioclient_mock.mock_calls
    )
    github_client.repos.events.subscribe.assert_called_once()


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_icons(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    github_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test to ensure that all sensor entities have an icon definition."""
    await setup_integration(hass, mock_config_entry)
    entities = er.async_entries_for_config_entry(
        entity_registry,
        config_entry_id=init_integration.entry_id,
        config_entry_id=mock_config_entry.entry_id,
    )

    icons = await icon.async_get_icons(hass, "entity", integrations=["github"])

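Worth noting: the removed assertions passed generator expressions to assert, and a generator object is always truthy, so those checks could never fail; asserting directly on the mocked subscribe call is the real fix. The setup logic these tests exercise presumably gates the subscription on the entry's polling preference, roughly like this sketch (control flow inferred from the assertions, callback elided):

async def maybe_subscribe(hass, entry, client) -> None:
    """Sketch: subscribe to repository events only when polling is enabled."""
    if entry.pref_disable_polling:
        return
    for repository in entry.options[CONF_REPOSITORIES]:
        await client.repos.events.subscribe(
            repository,
            event_callback=lambda event: None,  # real callback elided
        )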
@@ -1,50 +1,36 @@
"""Test GitHub sensor."""

import json
from unittest.mock import AsyncMock

import pytest
from freezegun.api import FrozenDateTimeFactory

from homeassistant.components.github.const import DOMAIN, FALLBACK_UPDATE_INTERVAL
from homeassistant.components.github.const import FALLBACK_UPDATE_INTERVAL
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util

from .common import TEST_REPOSITORY
from . import setup_integration

from tests.common import MockConfigEntry, async_fire_time_changed, async_load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.common import MockConfigEntry, async_fire_time_changed

TEST_SENSOR_ENTITY = "sensor.octocat_hello_world_latest_release"


# This test needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_updates_with_empty_release_array(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    aioclient_mock: AiohttpClientMocker,
    github_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that the sensor updates when the release array is empty."""
    await setup_integration(hass, mock_config_entry)
    state = hass.states.get(TEST_SENSOR_ENTITY)
    assert state.state == "v1.0.0"

    response_json = json.loads(await async_load_fixture(hass, "graphql.json", DOMAIN))
    response_json["data"]["repository"]["release"] = None
    headers = json.loads(await async_load_fixture(hass, "base_headers.json", DOMAIN))
    github_client.graphql.return_value.data["data"]["repository"]["release"] = None

    aioclient_mock.clear_requests()
    aioclient_mock.get(
        f"https://api.github.com/repos/{TEST_REPOSITORY}/events",
        json=[],
        headers=headers,
    )
    aioclient_mock.post(
        "https://api.github.com/graphql",
        json=response_json,
        headers=headers,
    )

    async_fire_time_changed(hass, dt_util.utcnow() + FALLBACK_UPDATE_INTERVAL)
    freezer.tick(FALLBACK_UPDATE_INTERVAL)
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    new_state = hass.states.get(TEST_SENSOR_ENTITY)
    assert new_state.state == "unavailable"
    assert new_state.state == STATE_UNAVAILABLE

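The new version drives the update through frozen time (freezer.tick plus async_fire_time_changed) instead of computing a wall-clock timestamp. A tiny helper capturing that pattern could look like the sketch below; the helper itself is hypothetical, while freezer.tick and async_fire_time_changed are the real test utilities:

from datetime import timedelta

from freezegun.api import FrozenDateTimeFactory

from homeassistant.core import HomeAssistant

from tests.common import async_fire_time_changed


async def advance_time(
    hass: HomeAssistant, freezer: FrozenDateTimeFactory, delta: timedelta
) -> None:
    """Hypothetical helper: advance frozen time and flush the resulting refresh."""
    freezer.tick(delta)
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

With it, the three inline lines above would collapse to: await advance_time(hass, freezer, FALLBACK_UPDATE_INTERVAL).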
@@ -132,6 +132,9 @@
    'name': None,
    'object_id_base': 'Carbon monoxide',
    'options': dict({
      'sensor.private': dict({
        'suggested_unit_of_measurement': 'ppm',
      }),
    }),
    'original_device_class': <SensorDeviceClass.CO: 'carbon_monoxide'>,
    'original_icon': None,
@@ -481,14 +484,14 @@
    'object_id_base': 'Ozone',
    'options': dict({
    }),
    'original_device_class': None,
    'original_device_class': <SensorDeviceClass.OZONE: 'ozone'>,
    'original_icon': None,
    'original_name': 'Ozone',
    'platform': 'google_air_quality',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'ozone',
    'translation_key': None,
    'unique_id': 'o3_10.1_20.1',
    'unit_of_measurement': 'ppb',
  })
@@ -497,6 +500,7 @@
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by Google Air Quality',
      'device_class': 'ozone',
      'friendly_name': 'Home Ozone',
      'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
      'unit_of_measurement': 'ppb',

@@ -130,3 +130,44 @@ async def test_hassio_addon_panel_api(
        "test1",
        {"enable": True, "title": "Test", "icon": "mdi:test", "admin": False},
    )


@pytest.mark.usefixtures("hassio_env")
async def test_hassio_addon_panel_registration(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test panel registration calls frontend.async_register_built_in_panel."""
    aioclient_mock.get(
        "http://127.0.0.1/ingress/panels",
        json={
            "result": "ok",
            "data": {
                "panels": {
                    "test_addon": {
                        "enable": True,
                        "title": "Test Addon",
                        "icon": "mdi:test-tube",
                        "admin": True,
                    },
                }
            },
        },
    )

    with patch(
        "homeassistant.components.hassio.addon_panel.frontend.async_register_built_in_panel"
    ) as mock_register:
        await async_setup_component(hass, "hassio", {})
        await hass.async_block_till_done()

        # Verify that async_register_built_in_panel was called with correct arguments
        # for our test addon
        mock_register.assert_any_call(
            hass,
            "app",
            frontend_url_path="test_addon",
            sidebar_title="Test Addon",
            sidebar_icon="mdi:test-tube",
            require_admin=True,
            config={"addon": "test_addon"},
        )

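The setup path this test exercises reads the Supervisor's /ingress/panels response and registers one sidebar panel per enabled add-on. A rough sketch of that loop; the field names come from the mocked payload above, the function itself is assumed:

async def register_addon_panels(hass, panels: dict) -> None:
    """Sketch: register one sidebar panel per enabled ingress add-on."""
    for addon, info in panels["data"]["panels"].items():
        if info["enable"]:
            frontend.async_register_built_in_panel(
                hass,
                "app",
                frontend_url_path=addon,
                sidebar_title=info["title"],
                sidebar_icon=info["icon"],
                require_admin=info["admin"],
                config={"addon": addon},
            )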
@@ -4,11 +4,7 @@ from typing import Any

import pytest

from homeassistant.components.media_player import (
    ATTR_MEDIA_VOLUME_LEVEL,
    ATTR_MEDIA_VOLUME_MUTED,
    MediaPlayerState,
)
from homeassistant.components.media_player import MediaPlayerState
from homeassistant.const import ATTR_LABEL_ID, CONF_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall

@@ -47,52 +43,6 @@ async def test_media_player_triggers_gated_by_labs_flag(
    ) in caplog.text


def parametrize_muted_trigger_states() -> list[tuple[str, list[StateDescription]]]:
    """Parametrize states and expected service call counts.

    Returns a list of tuples (trigger, trigger_options, states), where states
    is a list of StateDescription entries (state to set plus the expected
    service call count).
    """
    trigger = "media_player.muted"
    return parametrize_trigger_states(
        trigger=trigger,
        target_states=[
            # States with muted attribute
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_MUTED: True}),
            # States with volume attribute
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_LEVEL: 0}),
            # States with muted and volume attribute
            (
                MediaPlayerState.PLAYING,
                {ATTR_MEDIA_VOLUME_LEVEL: 0, ATTR_MEDIA_VOLUME_MUTED: True},
            ),
            (
                MediaPlayerState.PLAYING,
                {ATTR_MEDIA_VOLUME_LEVEL: 0, ATTR_MEDIA_VOLUME_MUTED: False},
            ),
            (
                MediaPlayerState.PLAYING,
                {ATTR_MEDIA_VOLUME_LEVEL: 1, ATTR_MEDIA_VOLUME_MUTED: True},
            ),
        ],
        other_states=[
            # States with muted attribute
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_MUTED: False}),
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_MUTED: None}),
            (MediaPlayerState.PLAYING, {}),  # Missing attribute
            # States with volume attribute
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_LEVEL: 1}),
            (MediaPlayerState.PLAYING, {ATTR_MEDIA_VOLUME_LEVEL: None}),
            (MediaPlayerState.PLAYING, {}),  # Missing attribute
            # States with muted and volume attribute
            (
                MediaPlayerState.PLAYING,
                {ATTR_MEDIA_VOLUME_LEVEL: 1, ATTR_MEDIA_VOLUME_MUTED: False},
            ),
        ],
    )


@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id", "entities_in_target"),
@@ -161,56 +111,6 @@ async def test_media_player_state_trigger_behavior_any(
)
@pytest.mark.parametrize(
    ("trigger", "trigger_options", "states"),
    [
        *parametrize_muted_trigger_states(),
    ],
)
async def test_media_player_state_attribute_trigger_behavior_any(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    target_media_players: list[str],
    trigger_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    trigger: str,
    states: list[StateDescription],
) -> None:
    """Test that the media player state trigger fires when any media player state changes to a specific state."""
    await async_setup_component(hass, "media_player", {})

    other_entity_ids = set(target_media_players) - {entity_id}

    # Set all media players, including the tested media player, to the initial state
    for eid in target_media_players:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()

    await arm_trigger(hass, trigger, {}, trigger_target_config)

    for state in states[1:]:
        included_state = state["included"]
        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert len(service_calls) == state["count"]
        for service_call in service_calls:
            assert service_call.data[CONF_ENTITY_ID] == entity_id
        service_calls.clear()

        # Check if changing other media players also triggers
        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
        await hass.async_block_till_done()
        assert len(service_calls) == (entities_in_target - 1) * state["count"]
        service_calls.clear()


@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("media_player"),
)
@pytest.mark.parametrize(
    ("trigger", "states"),
    [
        *parametrize_trigger_states(
            trigger="media_player.stopped_playing",
@@ -271,60 +171,6 @@ async def test_media_player_state_trigger_behavior_first(
)
@pytest.mark.parametrize(
    ("trigger", "trigger_options", "states"),
    [
        *parametrize_muted_trigger_states(),
    ],
)
async def test_media_player_state_attribute_trigger_behavior_first(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    target_media_players: list[str],
    trigger_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    trigger: str,
    states: list[StateDescription],
) -> None:
    """Test that the media player state trigger fires when the first media player state changes to a specific state."""
    await async_setup_component(hass, "media_player", {})

    other_entity_ids = set(target_media_players) - {entity_id}

    # Set all media players, including the tested media player, to the initial state
    for eid in target_media_players:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()

    await arm_trigger(
        hass,
        trigger,
        {"behavior": "first"},
        trigger_target_config,
    )

    for state in states[1:]:
        included_state = state["included"]
        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert len(service_calls) == state["count"]
        for service_call in service_calls:
            assert service_call.data[CONF_ENTITY_ID] == entity_id
        service_calls.clear()

        # Triggering other media players should not cause the trigger to fire again
        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
            await hass.async_block_till_done()
            assert len(service_calls) == 0


@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("media_player"),
)
@pytest.mark.parametrize(
    ("trigger", "states"),
    [
        *parametrize_trigger_states(
            trigger="media_player.stopped_playing",
@@ -375,51 +221,3 @@ async def test_media_player_state_trigger_behavior_last(
        for service_call in service_calls:
            assert service_call.data[CONF_ENTITY_ID] == entity_id
        service_calls.clear()


@pytest.mark.usefixtures("enable_experimental_triggers_conditions")
@pytest.mark.parametrize(
    ("trigger_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("media_player"),
)
@pytest.mark.parametrize(
    ("trigger", "states"),
    [
        *parametrize_muted_trigger_states(),
    ],
)
async def test_media_player_state_attribute_trigger_behavior_last(
    hass: HomeAssistant,
    service_calls: list[ServiceCall],
    target_media_players: list[str],
    trigger_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    trigger: str,
    states: list[StateDescription],
) -> None:
    """Test that the media player state trigger fires when the last media player state changes to a specific state."""
    await async_setup_component(hass, "media_player", {})

    other_entity_ids = set(target_media_players) - {entity_id}

    # Set all media players, including the tested media player, to the initial state
    for eid in target_media_players:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()

    await arm_trigger(hass, trigger, {"behavior": "last"}, trigger_target_config)

    for state in states[1:]:
        included_state = state["included"]
        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
            await hass.async_block_till_done()
            assert len(service_calls) == 0

        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert len(service_calls) == state["count"]
        for service_call in service_calls:
            assert service_call.data[CONF_ENTITY_ID] == entity_id
        service_calls.clear()

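These tests index each parametrized step as state["included"] and state["count"], so StateDescription is presumably a small TypedDict along these lines; this is an assumption, the real definition lives in the shared trigger test helpers:

from typing import TypedDict


class StateDescription(TypedDict):
    """Assumed shape of one parametrized state step."""

    included: tuple[str, dict] | None  # state and attributes to set, or None to remove
    count: int  # expected number of resulting service calls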
@@ -59,7 +59,7 @@ def mock_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
    return MockConfigEntry(
        domain=DOMAIN,
        title=TITLE,
        unique_id=54321,
        unique_id="54321",
        data={
            "auth_implementation": DOMAIN,
            "token": {

@@ -74,7 +74,7 @@ async def test_full_flow(
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "test@microbees.com"
    assert "result" in result
    assert result["result"].unique_id == 54321
    assert result["result"].unique_id == "54321"
    assert "token" in result["result"].data
    assert result["result"].data["token"]["access_token"] == "mock-access-token"
    assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token"
@@ -197,7 +197,7 @@ async def test_config_reauth_wrong_account(
) -> None:
    """Test reauth with wrong account."""
    await setup_integration(hass, config_entry)
    microbees.return_value.getMyProfile.return_value.id = 12345
    microbees.return_value.getMyProfile.return_value.id = "12345"
    result = await config_entry.start_reauth_flow(hass)
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "reauth_confirm"

tests/components/microbees/test_init.py (Normal file, +35)
@@ -0,0 +1,35 @@
"""Tests for the microBees component."""

from unittest.mock import patch

from homeassistant.components.microbees.const import DOMAIN
from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
    """Test migrating a 1.1 config entry to 1.2."""
    with patch(
        "homeassistant.components.microbees.async_setup_entry", return_value=True
    ):
        entry = MockConfigEntry(
            domain=DOMAIN,
            data={
                "auth_implementation": DOMAIN,
                "token": {
                    "refresh_token": "mock-refresh-token",
                    "access_token": "mock-access-token",
                    "type": "Bearer",
                    "expires_in": 60,
                },
            },
            version=1,
            minor_version=1,
            unique_id=54321,
        )
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
        assert entry.version == 1
        assert entry.minor_version == 2
        assert entry.unique_id == "54321"
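The migration these microbees assertions (and the monzo test further down) exercise bumps minor_version and stringifies a previously integer unique_id. A sketch of what such an async_migrate_entry hook typically looks like; illustrative, not the integration's exact code:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Sketch: migrate 1.1 entries whose unique_id was stored as an int."""
    if entry.version == 1 and entry.minor_version == 1:
        hass.config_entries.async_update_entry(
            entry,
            unique_id=str(entry.unique_id),
            minor_version=2,
        )
    return True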
@@ -244,7 +244,7 @@ async def test_config_reauth_wrong_account(
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
            "user_id": 12346,
            "user_id": "12346",
        },
    )

@@ -1,7 +1,7 @@
"""Tests for component initialisation."""

from datetime import timedelta
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, patch

from freezegun.api import FrozenDateTimeFactory
from monzopy import AuthorisationExpiredError
@@ -35,3 +35,29 @@ async def test_api_can_trigger_reauth(
    assert flow["step_id"] == "reauth_confirm"
    assert flow["handler"] == DOMAIN
    assert flow["context"]["source"] == SOURCE_REAUTH


async def test_migrate_entry_minor_version_1_2(hass: HomeAssistant) -> None:
    """Test migrating a 1.1 config entry to 1.2."""
    with patch("homeassistant.components.monzo.async_setup_entry", return_value=True):
        entry = MockConfigEntry(
            domain=DOMAIN,
            data={
                "auth_implementation": DOMAIN,
                "token": {
                    "refresh_token": "mock-refresh-token",
                    "access_token": "mock-access-token",
                    "type": "Bearer",
                    "expires_in": 60,
                    "user_id": "600",
                },
            },
            version=1,
            minor_version=1,
            unique_id=600,
        )
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
        assert entry.version == 1
        assert entry.minor_version == 2
        assert entry.unique_id == "600"

@@ -57,6 +57,7 @@ def mock_aiontfy() -> Generator[AsyncMock]:
        actions=[],
        attachment=None,
        content_type=None,
        sequence_id="Mc3otamDNcpJ",
    )

    resp.to_dict.return_value = {
@@ -74,6 +75,7 @@ def mock_aiontfy() -> Generator[AsyncMock]:
        "actions": [],
        "attachment": None,
        "content_type": None,
        "sequence_id": "Mc3otamDNcpJ",
    }

    async def mock_ws(

@@ -59,6 +59,7 @@
    'id': 'h6Y2hKA5sy0U',
    'message': 'Hello',
    'priority': 3,
    'sequence_id': 'Mc3otamDNcpJ',
    'tags': list([
      'octopus',
    ]),

@@ -101,6 +101,7 @@ async def test_event(
        "time": datetime(2025, 3, 28, 17, 58, 46, tzinfo=UTC),
        "title": "Title",
        "topic": "mytopic",
        "sequence_id": "Mc3otamDNcpJ",
    }

@@ -22,6 +22,7 @@ from homeassistant.components.ntfy.notify import (
    ATTR_ICON,
    ATTR_MARKDOWN,
    ATTR_PRIORITY,
    ATTR_SEQUENCE_ID,
    ATTR_TAGS,
    SERVICE_PUBLISH,
)
@@ -60,6 +61,7 @@ async def test_ntfy_publish(
            ATTR_MARKDOWN: True,
            ATTR_PRIORITY: "5",
            ATTR_TAGS: ["partying_face", "grin"],
            ATTR_SEQUENCE_ID: "Mc3otamDNcpJ",
        },
        blocking=True,
    )
@@ -76,6 +78,7 @@ async def test_ntfy_publish(
            markdown=True,
            icon=URL("https://example.org/logo.png"),
            delay="86430.0s",
            sequence_id="Mc3otamDNcpJ",
        )
    )

@@ -52,10 +52,13 @@ def make_test_trigger(*entities: str) -> dict:


async def async_trigger(
    hass: HomeAssistant, entity_id: str, state: str | None = None
    hass: HomeAssistant,
    entity_id: str,
    state: str | None = None,
    attributes: dict | None = None,
) -> None:
    """Trigger a state change."""
    hass.states.async_set(entity_id, state)
    hass.states.async_set(entity_id, state, attributes)
    await hass.async_block_till_done()

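With the extra parameter, callers can drive attribute-based triggers through the same helper. A usage sketch; the entity id and attribute values are illustrative, the other names come from this test module:

await async_trigger(
    hass,
    "media_player.living_room",  # illustrative entity id
    MediaPlayerState.PLAYING,
    {ATTR_MEDIA_VOLUME_MUTED: True},
)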
Some files were not shown because too many files have changed in this diff.