Merge branch 'dev' into async-2

@@ -10,7 +10,7 @@
   "customizations": {
     "vscode": {
       "extensions": [
-        "ms-python.black-formatter",
+        "charliermarsh.ruff",
         "ms-python.pylint",
         "ms-python.vscode-pylance",
        "visualstudioexptteam.vscodeintellicode",
@@ -39,7 +39,10 @@
         "!include_dir_list scalar",
         "!include_dir_merge_list scalar",
         "!include_dir_merge_named scalar"
-      ]
+      ],
+      "[python]": {
+        "editor.defaultFormatter": "charliermarsh.ruff"
+      }
     }
   }
 }

2  .github/PULL_REQUEST_TEMPLATE.md  vendored
@@ -60,7 +60,7 @@
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
 - [ ] I have followed the [perfect PR recommendations][perfect-pr]
-- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
+- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
 - [ ] Tests have been added to verify that the new code works.

 If user exposed functionality or configuration variables are added/changed:

64  .github/workflows/ci.yaml  vendored
@@ -36,7 +36,6 @@ env:
   CACHE_VERSION: 5
   PIP_CACHE_VERSION: 4
   MYPY_CACHE_VERSION: 6
-  BLACK_CACHE_VERSION: 1
   HA_SHORT_VERSION: "2023.12"
   DEFAULT_PYTHON: "3.11"
   ALL_PYTHON_VERSIONS: "['3.11', '3.12']"
@@ -58,7 +57,6 @@ env:
   POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
   PIP_CACHE: /tmp/pip-cache
-  BLACK_CACHE: /tmp/black-cache
   SQLALCHEMY_WARN_20: 1
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1
@@ -261,8 +259,8 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-22.04
     needs:
       - info
@@ -276,13 +274,6 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
-      - name: Generate partial black restore key
-        id: generate-black-key
-        run: |
-          black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3)
-          echo "version=$black_version" >> $GITHUB_OUTPUT
-          echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{
-            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
         uses: actions/cache/restore@v3.3.2
@@ -301,33 +292,12 @@ jobs:
            key: >-
              ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
              needs.info.outputs.pre-commit_cache_key }}
-      - name: Restore black cache
-        uses: actions/cache@v3.3.2
-        with:
-          path: ${{ env.BLACK_CACHE }}
-          key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
-            steps.generate-black-key.outputs.key }}
-          restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{
-            env.BLACK_CACHE_VERSION }}-${{ steps.generate-black-key.outputs.version }}-${{
-            env.HA_SHORT_VERSION }}-
-      - name: Run black (fully)
-        if: needs.info.outputs.test_full_suite == 'true'
-        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
-      - name: Run black (partially)
-        if: needs.info.outputs.test_full_suite == 'false'
-        shell: bash
-        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
-        run: |
-          . venv/bin/activate
-          shopt -s globstar
-          pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
+      - name: Run ruff-format
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-ruff:
     name: Check ruff
@@ -362,22 +332,12 @@ jobs:
            key: >-
              ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
              needs.info.outputs.pre-commit_cache_key }}
-      - name: Register ruff problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/ruff.json"
-      - name: Run ruff (fully)
-        if: needs.info.outputs.test_full_suite == 'true'
+      - name: Run ruff
         run: |
           . venv/bin/activate
           pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
-      - name: Run ruff (partially)
-        if: needs.info.outputs.test_full_suite == 'false'
-        shell: bash
-        run: |
-          . venv/bin/activate
-          shopt -s globstar
-          pre-commit run --hook-stage manual ruff --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-other:
     name: Check other linters
     runs-on: ubuntu-22.04
@@ -787,7 +747,7 @@ jobs:
            cov_params+=(--cov-report=xml)
          fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
            -qq \
            --timeout=9 \
            --durations=10 \
@@ -824,7 +784,7 @@ jobs:
            cov_params+=(--cov-report=term-missing)
          fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
            -qq \
            --timeout=9 \
            -n auto \
@@ -945,7 +905,7 @@ jobs:
            cov_params+=(--cov-report=term-missing)
          fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
            -qq \
            --timeout=20 \
            -n 1 \
@@ -1069,7 +1029,7 @@ jobs:
            cov_params+=(--cov-report=term-missing)
          fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
            -qq \
            --timeout=9 \
            -n 1 \

30  .github/workflows/matchers/ruff.json  vendored
@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "ruff-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "ruff-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
@@ -1,16 +1,11 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.1
+    rev: v0.1.6
     hooks:
       - id: ruff
         args:
           - --fix
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.11.0
-    hooks:
-      - id: black
-        args:
-          - --quiet
+      - id: ruff-format
+        files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.2

6  .vscode/extensions.json  vendored
@@ -1,3 +1,7 @@
 {
-  "recommendations": ["esbenp.prettier-vscode", "ms-python.python"]
+  "recommendations": [
+    "charliermarsh.ruff",
+    "esbenp.prettier-vscode",
+    "ms-python.python"
+  ]
 }

@@ -5,8 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 # Uninstall pre-installed formatting and linting tools
 # They would conflict with our pinned versions
 RUN \
-    pipx uninstall black \
-    && pipx uninstall pydocstyle \
+    pipx uninstall pydocstyle \
     && pipx uninstall pycodestyle \
     && pipx uninstall mypy \
     && pipx uninstall pylint

@@ -5,9 +5,7 @@ from collections.abc import Mapping

 ValueType = (
     # Example: entities.all = { read: true, control: true }
-    Mapping[str, bool]
-    | bool
-    | None
+    Mapping[str, bool] | bool | None
 )

 # Example: entities.domains = { light: … }

@@ -10,10 +10,11 @@ from typing import Any, cast

 import voluptuous as vol

-from homeassistant.core import callback
+from homeassistant.core import async_get_hass, callback
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

 from ..models import Credentials, UserMeta
 from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
@@ -21,10 +22,28 @@ from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
 AUTH_PROVIDER_TYPE = "legacy_api_password"
 CONF_API_PASSWORD = "api_password"

-CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
+_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
     {vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
 )


+def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]:
+    async_create_issue(
+        async_get_hass(),
+        "auth",
+        "deprecated_legacy_api_password",
+        breaks_in_ha_version="2024.6.0",
+        is_fixable=False,
+        severity=IssueSeverity.WARNING,
+        translation_key="deprecated_legacy_api_password",
+    )
+
+    return _CONFIG_SCHEMA(config)  # type: ignore[no-any-return]
+
+
+CONFIG_SCHEMA = _create_repair_and_validate
+
+
 LEGACY_USER_NAME = "Legacy API password user"
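
The hunk above replaces a static voluptuous schema with a callable assigned to CONFIG_SCHEMA, so that validating the provider's YAML also files the repair issue as a side effect. A minimal standalone sketch of that pattern, where the print stands in for the real async_create_issue call and _BASE_SCHEMA/_warn_and_validate are illustrative names:

from typing import Any

import voluptuous as vol

_BASE_SCHEMA = vol.Schema({vol.Required("api_password"): str})


def _warn_and_validate(config: dict[str, Any]) -> dict[str, Any]:
    # Side effect first: surface the deprecation (the real code files a
    # repair issue via async_create_issue() here).
    print("legacy_api_password is deprecated")
    # Then defer to the wrapped schema for the actual validation.
    return _BASE_SCHEMA(config)


# Anything callable works as a schema here: the framework only requires
# schema(config) to return the validated config or raise.
CONFIG_SCHEMA = _warn_and_validate

validated = CONFIG_SCHEMA({"api_password": "hunter2"})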

@@ -1315,9 +1315,9 @@ class PipelineInput:
         if stt_audio_buffer:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
-            async def buffer_then_audio_stream() -> AsyncGenerator[
-                ProcessedAudioChunk, None
-            ]:
+            async def buffer_then_audio_stream() -> (
+                AsyncGenerator[ProcessedAudioChunk, None]
+            ):
                 # Buffered audio
                 for chunk in stt_audio_buffer:
                     yield chunk
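
The reformat wraps the return annotation in parentheses instead of splitting the subscript, which is ruff-format's layout. The function itself is an async analogue of itertools.chain; a runnable sketch of the idea with integers standing in for audio chunks (all names here are illustrative):

import asyncio
from collections.abc import AsyncGenerator


async def buffer_then_stream(
    buffered: list[int],
    stream: AsyncGenerator[int, None],
) -> AsyncGenerator[int, None]:
    # Drain the already-buffered items first ...
    for item in buffered:
        yield item
    # ... then continue with the live stream (async itertools.chain).
    async for item in stream:
        yield item


async def _demo() -> None:
    async def live() -> AsyncGenerator[int, None]:
        for i in range(3, 6):
            yield i

    print([x async for x in buffer_then_stream([0, 1, 2], live())])


asyncio.run(_demo())  # prints [0, 1, 2, 3, 4, 5]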

@@ -417,8 +417,7 @@ async def websocket_device_capture(
     # single sample (16 bits) per queue item.
     max_queue_items = (
         # +1 for None to signal end
-        int(math.ceil(timeout_seconds * CAPTURE_RATE))
-        + 1
+        int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1
     )

     audio_queue = DeviceAudioQueue(queue=asyncio.Queue(maxsize=max_queue_items))
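
The queue is sized from the capture duration, with one extra slot reserved for a None sentinel that marks end-of-stream, so the producer can always enqueue the terminator without blocking. A small self-contained sketch of the sizing and sentinel pattern (the rate constant is illustrative, not the component's value):

import asyncio
import math

CAPTURE_RATE = 16000  # queue items per second; illustrative value


def make_capture_queue(timeout_seconds: float) -> asyncio.Queue:
    # One slot per expected item, +1 for the None end-of-stream sentinel.
    max_queue_items = int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1
    return asyncio.Queue(maxsize=max_queue_items)


async def drain(queue: asyncio.Queue) -> int:
    count = 0
    while (item := await queue.get()) is not None:  # None terminates
        count += 1
    return count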

@@ -31,5 +31,11 @@
       "invalid_code": "Invalid code, please try again."
     }
-  }
+  },
+  "issues": {
+    "deprecated_legacy_api_password": {
+      "title": "The legacy API password is deprecated",
+      "description": "The legacy API password authentication provider is deprecated and will be removed. Please remove it from your YAML configuration and use the default Home Assistant authentication provider instead."
+    }
+  }
 }

@@ -44,7 +44,8 @@ SELECT_TYPES: dict[str, BMWSelectEntityDescription] = {
         translation_key="ac_limit",
         is_available=lambda v: v.is_remote_set_ac_limit_enabled,
         dynamic_options=lambda v: [
-            str(lim) for lim in v.charging_profile.ac_available_limits  # type: ignore[union-attr]
+            str(lim)
+            for lim in v.charging_profile.ac_available_limits  # type: ignore[union-attr]
         ],
         current_option=lambda v: str(v.charging_profile.ac_current_limit),  # type: ignore[union-attr]
         remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(

@@ -140,7 +140,7 @@ def _ws_handle_cloud_errors(
     handler: Callable[
         [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
         Coroutine[None, None, None],
-    ]
+    ],
 ) -> Callable[
     [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
     Coroutine[None, None, None],
@@ -362,8 +362,11 @@ def _require_cloud_login(
     handler: Callable[
         [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
         None,
-    ]
-) -> Callable[[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], None,]:
+    ],
+) -> Callable[
+    [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
+    None,
+]:
     """Websocket decorator that requires cloud to be logged in."""

     @wraps(handler)
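
These hunks only reflow the Callable annotations of websocket decorators; the underlying pattern is a decorator that checks a precondition before delegating to the wrapped handler. A trimmed, runnable sketch of such a guard decorator (stand-in types, not the cloud component's API):

from collections.abc import Callable
from functools import wraps
from typing import Any

Handler = Callable[[dict[str, Any]], None]


def require_logged_in(handler: Handler) -> Handler:
    """Reject messages unless the connection is logged in."""

    @wraps(handler)  # keep the wrapped handler's name and docstring
    def wrapper(msg: dict[str, Any]) -> None:
        if not msg.get("logged_in"):
            raise PermissionError("cloud login required")
        handler(msg)

    return wrapper


@require_logged_in
def handle_status(msg: dict[str, Any]) -> None:
    print("status for", msg["user"])


handle_status({"logged_in": True, "user": "demo"})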

@@ -129,9 +129,8 @@ class DeconzDevice(DeconzBase[_DeviceT], Entity):
         if self.gateway.ignore_state_updates:
             return

-        if (
-            self._update_keys is not None
-            and not self._device.changed_keys.intersection(self._update_keys)
+        if self._update_keys is not None and not self._device.changed_keys.intersection(
+            self._update_keys
         ):
             return

@@ -63,7 +63,8 @@ async def async_setup_entry(  # noqa: C901
         )
         await device.async_connect(session_instance=async_client)
         device.password = entry.data.get(
-            CONF_PASSWORD, ""  # This key was added in HA Core 2022.6
+            CONF_PASSWORD,
+            "",  # This key was added in HA Core 2022.6
         )
     except DeviceNotFound as err:
         raise ConfigEntryNotReady(

@@ -453,10 +453,9 @@ class DlnaDmrEntity(MediaPlayerEntity):
         for state_variable in state_variables:
             # Force a state refresh when player begins or pauses playback
             # to update the position info.
-            if (
-                state_variable.name == "TransportState"
-                and state_variable.value
-                in (TransportState.PLAYING, TransportState.PAUSED_PLAYBACK)
+            if state_variable.name == "TransportState" and state_variable.value in (
+                TransportState.PLAYING,
+                TransportState.PAUSED_PLAYBACK,
             ):
                 force_refresh = True

@@ -441,9 +441,7 @@ async def async_setup_entry(
             description,
             entry,
             telegram,
-            *device_class_and_uom(
-                telegram, description
-            ),  # type: ignore[arg-type]
+            *device_class_and_uom(telegram, description),  # type: ignore[arg-type]
         )
         for description in all_sensors
         if (

@@ -18,13 +18,11 @@ from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

-_COMMAND_BY_MOTION_STATUS = (
-    {  # Maps the stop command to use for every cover motion status
+_COMMAND_BY_MOTION_STATUS = {  # Maps the stop command to use for every cover motion status
     CoverStatus.DOWN: CoverCommand.DOWN,
     CoverStatus.UP: CoverCommand.UP,
     CoverStatus.IDLE: None,
 }
-)


 async def async_setup_entry(

@@ -14,9 +14,7 @@ class EsphomeEnumMapper(Generic[_EnumT, _ValT]):
     def __init__(self, mapping: dict[_EnumT, _ValT]) -> None:
         """Construct a EsphomeEnumMapper."""
         # Add none mapping
-        augmented_mapping: dict[
-            _EnumT | None, _ValT | None
-        ] = mapping  # type: ignore[assignment]
+        augmented_mapping: dict[_EnumT | None, _ValT | None] = mapping  # type: ignore[assignment]
         augmented_mapping[None] = None

         self._mapping = augmented_mapping
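
The reflowed annotation belongs to a mapper that widens an enum-to-value dict so that None maps to None on both sides, letting a missing state round-trip without special cases. A condensed sketch of that idea (generics and method names simplified; not the ESPHome API):

from enum import Enum


class FanSpeed(Enum):
    LOW = 1
    HIGH = 2


class EnumMapper:
    """Two-way mapping that treats None as mapping to None."""

    def __init__(self, mapping: dict) -> None:
        # Widen the type: the original annotates this as
        # dict[_EnumT | None, _ValT | None] and silences mypy.
        augmented: dict = dict(mapping)
        augmented[None] = None
        self._mapping = augmented
        self._inverse = {v: k for k, v in augmented.items()}

    def forward(self, value):
        return self._mapping[value]

    def backward(self, value):
        return self._inverse[value]


mapper = EnumMapper({FanSpeed.LOW: "low", FanSpeed.HIGH: "high"})
assert mapper.forward(None) is None
assert mapper.backward("low") is FanSpeed.LOW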

@@ -117,7 +117,8 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):
         """Return the current speed percentage."""
         if not self._supports_speed_levels:
             return ordered_list_item_to_percentage(
-                ORDERED_NAMED_FAN_SPEEDS, self._state.speed  # type: ignore[misc]
+                ORDERED_NAMED_FAN_SPEEDS,
+                self._state.speed,  # type: ignore[misc]
             )

         return ranged_value_to_percentage(

@@ -124,11 +124,14 @@ def convert_dict(dictionary: dict[str, Any]) -> dict[str, Any]:
     def convert_key(key: str) -> str:
         """Convert a string to snake_case."""
         string = re.sub(r"[\-\.\s]", "_", str(key))
-        return (string[0]).lower() + re.sub(
+        return (
+            (string[0]).lower()
+            + re.sub(
                 r"[A-Z]",
                 lambda matched: f"_{matched.group(0).lower()}",  # type:ignore[str-bytes-safe]
                 string[1:],
             )
+        )

     return {
         (convert_key(k) if isinstance(k, str) else k): (

@@ -1,7 +1,9 @@
 """Constants for the Fronius integration."""
+from enum import StrEnum
 from typing import Final, NamedTuple, TypedDict

 from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.typing import StateType

 DOMAIN: Final = "fronius"
@@ -25,3 +27,97 @@ class FroniusDeviceInfo(NamedTuple):
     device_info: DeviceInfo
     solar_net_id: SolarNetId
     unique_id: str
+
+
+class InverterStatusCodeOption(StrEnum):
+    """Status codes for Fronius inverters."""
+
+    # these are keys for state translations - so snake_case is used
+    STARTUP = "startup"
+    RUNNING = "running"
+    STANDBY = "standby"
+    BOOTLOADING = "bootloading"
+    ERROR = "error"
+    IDLE = "idle"
+    READY = "ready"
+    SLEEPING = "sleeping"
+    UNKNOWN = "unknown"
+    INVALID = "invalid"
+
+
+_INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = {
+    0: InverterStatusCodeOption.STARTUP,
+    1: InverterStatusCodeOption.STARTUP,
+    2: InverterStatusCodeOption.STARTUP,
+    3: InverterStatusCodeOption.STARTUP,
+    4: InverterStatusCodeOption.STARTUP,
+    5: InverterStatusCodeOption.STARTUP,
+    6: InverterStatusCodeOption.STARTUP,
+    7: InverterStatusCodeOption.RUNNING,
+    8: InverterStatusCodeOption.STANDBY,
+    9: InverterStatusCodeOption.BOOTLOADING,
+    10: InverterStatusCodeOption.ERROR,
+    11: InverterStatusCodeOption.IDLE,
+    12: InverterStatusCodeOption.READY,
+    13: InverterStatusCodeOption.SLEEPING,
+    255: InverterStatusCodeOption.UNKNOWN,
+}
+
+
+def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption:
+    """Return a status message for a given status code."""
+    return _INVERTER_STATUS_CODES.get(code, InverterStatusCodeOption.INVALID)  # type: ignore[arg-type]
+
+
+class MeterLocationCodeOption(StrEnum):
+    """Meter location codes for Fronius meters."""
+
+    # these are keys for state translations - so snake_case is used
+    FEED_IN = "feed_in"
+    CONSUMPTION_PATH = "consumption_path"
+    GENERATOR = "external_generator"
+    EXT_BATTERY = "external_battery"
+    SUBLOAD = "subload"
+
+
+def get_meter_location_description(code: StateType) -> MeterLocationCodeOption | None:
+    """Return a location_description for a given location code."""
+    match int(code):  # type: ignore[arg-type]
+        case 0:
+            return MeterLocationCodeOption.FEED_IN
+        case 1:
+            return MeterLocationCodeOption.CONSUMPTION_PATH
+        case 3:
+            return MeterLocationCodeOption.GENERATOR
+        case 4:
+            return MeterLocationCodeOption.EXT_BATTERY
+        case _ as _code if 256 <= _code <= 511:
+            return MeterLocationCodeOption.SUBLOAD
+    return None
+
+
+class OhmPilotStateCodeOption(StrEnum):
+    """OhmPilot state codes for Fronius inverters."""
+
+    # these are keys for state translations - so snake_case is used
+    UP_AND_RUNNING = "up_and_running"
+    KEEP_MINIMUM_TEMPERATURE = "keep_minimum_temperature"
+    LEGIONELLA_PROTECTION = "legionella_protection"
+    CRITICAL_FAULT = "critical_fault"
+    FAULT = "fault"
+    BOOST_MODE = "boost_mode"
+
+
+_OHMPILOT_STATE_CODES: Final[dict[int, OhmPilotStateCodeOption]] = {
+    0: OhmPilotStateCodeOption.UP_AND_RUNNING,
+    1: OhmPilotStateCodeOption.KEEP_MINIMUM_TEMPERATURE,
+    2: OhmPilotStateCodeOption.LEGIONELLA_PROTECTION,
+    3: OhmPilotStateCodeOption.CRITICAL_FAULT,
+    4: OhmPilotStateCodeOption.FAULT,
+    5: OhmPilotStateCodeOption.BOOST_MODE,
+}
+
+
+def get_ohmpilot_state_message(code: StateType) -> OhmPilotStateCodeOption | None:
+    """Return a status message for a given status code."""
+    return _OHMPILOT_STATE_CODES.get(code)  # type: ignore[arg-type]
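
The new enum members double as translation keys, and the lookup helpers fall back to a sentinel option instead of raising on unexpected codes. A quick self-contained sketch of how the dict-with-default and guarded-range match lookups behave (a plain Enum stands in for StrEnum, and the string values are illustrative):

from enum import Enum


class Status(Enum):  # stand-in for InverterStatusCodeOption
    RUNNING = "running"
    INVALID = "invalid"


_CODES = {7: Status.RUNNING}


def status_message(code: int) -> Status:
    # Unknown codes collapse to INVALID instead of raising KeyError.
    return _CODES.get(code, Status.INVALID)


def meter_location(code: int) -> str | None:
    # structural match with a guarded range arm, as in the diff above
    match int(code):
        case 0:
            return "feed_in"
        case c if 256 <= c <= 511:
            return "subload"
    return None


assert status_message(7) is Status.RUNNING
assert status_message(999) is Status.INVALID
assert meter_location(300) == "subload"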

@@ -49,8 +49,10 @@ class FroniusCoordinatorBase(
         """Set up the FroniusCoordinatorBase class."""
         self._failed_update_count = 0
         self.solar_net = solar_net
-        # unregistered_keys are used to create entities in platform module
-        self.unregistered_keys: dict[SolarNetId, set[str]] = {}
+        # unregistered_descriptors are used to create entities in platform module
+        self.unregistered_descriptors: dict[
+            SolarNetId, list[FroniusSensorEntityDescription]
+        ] = {}
         super().__init__(*args, update_interval=self.default_interval, **kwargs)

     @abstractmethod
@@ -73,11 +75,11 @@ class FroniusCoordinatorBase(
             self.update_interval = self.default_interval

         for solar_net_id in data:
-            if solar_net_id not in self.unregistered_keys:
+            if solar_net_id not in self.unregistered_descriptors:
                 # id seen for the first time
-                self.unregistered_keys[solar_net_id] = {
-                    desc.key for desc in self.valid_descriptions
-                }
+                self.unregistered_descriptors[
+                    solar_net_id
+                ] = self.valid_descriptions.copy()
         return data

     @callback
@@ -92,22 +94,34 @@ class FroniusCoordinatorBase(
         """

         @callback
-        def _add_entities_for_unregistered_keys() -> None:
+        def _add_entities_for_unregistered_descriptors() -> None:
             """Add entities for keys seen for the first time."""
-            new_entities: list = []
+            new_entities: list[_FroniusEntityT] = []
             for solar_net_id, device_data in self.data.items():
-                for key in self.unregistered_keys[solar_net_id].intersection(
-                    device_data
-                ):
-                    if device_data[key]["value"] is None:
-                        continue
-                    new_entities.append(entity_constructor(self, key, solar_net_id))
-                    self.unregistered_keys[solar_net_id].remove(key)
+                remaining_unregistered_descriptors = []
+                for description in self.unregistered_descriptors[solar_net_id]:
+                    key = description.response_key or description.key
+                    if key not in device_data:
+                        remaining_unregistered_descriptors.append(description)
+                        continue
+                    if device_data[key]["value"] is None:
+                        remaining_unregistered_descriptors.append(description)
+                        continue
+                    new_entities.append(
+                        entity_constructor(
+                            coordinator=self,
+                            description=description,
+                            solar_net_id=solar_net_id,
+                        )
+                    )
+                self.unregistered_descriptors[
+                    solar_net_id
+                ] = remaining_unregistered_descriptors
             async_add_entities(new_entities)

-        _add_entities_for_unregistered_keys()
+        _add_entities_for_unregistered_descriptors()
         self.solar_net.cleanup_callbacks.append(
-            self.async_add_listener(_add_entities_for_unregistered_keys)
+            self.async_add_listener(_add_entities_for_unregistered_descriptors)
         )
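
The coordinator now tracks whole entity descriptions per device instead of bare keys, and retries descriptions whose data has not appeared yet. The bookkeeping reduces to partitioning the pending list into "create now" and "try again later" on every refresh; a stripped-down runnable sketch of that loop (names shortened, entity construction replaced by collecting keys):

from dataclasses import dataclass


@dataclass
class Descriptor:
    key: str
    response_key: str | None = None


def register_ready(pending: list[Descriptor], device_data: dict) -> tuple[list, list]:
    """Split pending descriptors into created entities and ones to retry."""
    created, remaining = [], []
    for desc in pending:
        key = desc.response_key or desc.key
        # Defer until the key shows up in the payload with a real value.
        if key not in device_data or device_data[key]["value"] is None:
            remaining.append(desc)
            continue
        created.append(key)  # the real code constructs an entity here
    return created, remaining


pending = [Descriptor("power"), Descriptor("status_message", "status_code")]
created, pending = register_ready(pending, {"power": {"value": 1200}})
assert created == ["power"] and pending[0].key == "status_message"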

@@ -1,6 +1,7 @@
 """Support for Fronius devices."""
 from __future__ import annotations

+from collections.abc import Callable
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, Final

@@ -30,7 +31,16 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, SOLAR_NET_DISCOVERY_NEW
+from .const import (
+    DOMAIN,
+    SOLAR_NET_DISCOVERY_NEW,
+    InverterStatusCodeOption,
+    MeterLocationCodeOption,
+    OhmPilotStateCodeOption,
+    get_inverter_status_message,
+    get_meter_location_description,
+    get_ohmpilot_state_message,
+)

 if TYPE_CHECKING:
     from . import FroniusSolarNet
@@ -102,6 +112,8 @@ class FroniusSensorEntityDescription(SensorEntityDescription):
     # Gen24 devices may report 0 for total energy while doing firmware updates.
     # Handling such values shall mitigate spikes in delta calculations.
     invalid_when_falsy: bool = False
+    response_key: str | None = None
+    value_fn: Callable[[StateType], StateType] | None = None


 INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [
@@ -198,6 +210,15 @@ INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [
     FroniusSensorEntityDescription(
         key="status_code",
         entity_category=EntityCategory.DIAGNOSTIC,
+        entity_registry_enabled_default=False,
     ),
+    FroniusSensorEntityDescription(
+        key="status_message",
+        response_key="status_code",
+        entity_category=EntityCategory.DIAGNOSTIC,
+        device_class=SensorDeviceClass.ENUM,
+        options=[opt.value for opt in InverterStatusCodeOption],
+        value_fn=get_inverter_status_message,
+    ),
     FroniusSensorEntityDescription(
         key="led_state",
@@ -306,6 +327,15 @@ METER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [
     FroniusSensorEntityDescription(
         key="meter_location",
         entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=int,  # type: ignore[arg-type]
     ),
+    FroniusSensorEntityDescription(
+        key="meter_location_description",
+        response_key="meter_location",
+        entity_category=EntityCategory.DIAGNOSTIC,
+        device_class=SensorDeviceClass.ENUM,
+        options=[opt.value for opt in MeterLocationCodeOption],
+        value_fn=get_meter_location_description,
+    ),
     FroniusSensorEntityDescription(
         key="power_apparent_phase_1",
@@ -495,7 +525,11 @@ OHMPILOT_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [
     ),
     FroniusSensorEntityDescription(
         key="state_message",
+        response_key="state_code",
         entity_category=EntityCategory.DIAGNOSTIC,
+        device_class=SensorDeviceClass.ENUM,
+        options=[opt.value for opt in OhmPilotStateCodeOption],
+        value_fn=get_ohmpilot_state_message,
     ),
 ]

@@ -630,24 +664,22 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEntity):
     """Defines a Fronius coordinator entity."""

     entity_description: FroniusSensorEntityDescription
-    entity_descriptions: list[FroniusSensorEntityDescription]

     _attr_has_entity_name = True

     def __init__(
         self,
         coordinator: FroniusCoordinatorBase,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius meter sensor."""
         super().__init__(coordinator)
-        self.entity_description = next(
-            desc for desc in self.entity_descriptions if desc.key == key
-        )
+        self.entity_description = description
+        self.response_key = description.response_key or description.key
         self.solar_net_id = solar_net_id
         self._attr_native_value = self._get_entity_value()
-        self._attr_translation_key = self.entity_description.key
+        self._attr_translation_key = description.key

     def _device_data(self) -> dict[str, Any]:
         """Extract information for SolarNet device from coordinator data."""
@@ -655,13 +687,13 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEntity):

     def _get_entity_value(self) -> Any:
         """Extract entity value from coordinator. Raises KeyError if not included in latest update."""
-        new_value = self.coordinator.data[self.solar_net_id][
-            self.entity_description.key
-        ]["value"]
+        new_value = self.coordinator.data[self.solar_net_id][self.response_key]["value"]
         if new_value is None:
             return self.entity_description.default_value
         if self.entity_description.invalid_when_falsy and not new_value:
             return None
+        if self.entity_description.value_fn is not None:
+            return self.entity_description.value_fn(new_value)
         if isinstance(new_value, float):
             return round(new_value, 4)
         return new_value
@@ -681,54 +713,54 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEntity):
 class InverterSensor(_FroniusSensorEntity):
     """Defines a Fronius inverter device sensor entity."""

-    entity_descriptions = INVERTER_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusInverterUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius inverter sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         # device_info created in __init__ from a `GetInverterInfo` request
         self._attr_device_info = coordinator.inverter_info.device_info
-        self._attr_unique_id = f"{coordinator.inverter_info.unique_id}-{key}"
+        self._attr_unique_id = (
+            f"{coordinator.inverter_info.unique_id}-{description.key}"
+        )


 class LoggerSensor(_FroniusSensorEntity):
     """Defines a Fronius logger device sensor entity."""

-    entity_descriptions = LOGGER_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusLoggerUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius meter sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         logger_data = self._device_data()
         # Logger device is already created in FroniusSolarNet._create_solar_net_device
         self._attr_device_info = coordinator.solar_net.system_device_info
-        self._attr_native_unit_of_measurement = logger_data[key].get("unit")
-        self._attr_unique_id = f'{logger_data["unique_identifier"]["value"]}-{key}'
+        self._attr_native_unit_of_measurement = logger_data[self.response_key].get(
+            "unit"
+        )
+        self._attr_unique_id = (
+            f'{logger_data["unique_identifier"]["value"]}-{description.key}'
+        )


 class MeterSensor(_FroniusSensorEntity):
     """Defines a Fronius meter device sensor entity."""

-    entity_descriptions = METER_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusMeterUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius meter sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         meter_data = self._device_data()
         # S0 meters connected directly to inverters respond "n.a." as serial number
         # `model` contains the inverter id: "S0 Meter at inverter 1"
@@ -745,22 +777,20 @@ class MeterSensor(_FroniusSensorEntity):
             name=meter_data["model"]["value"],
             via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id),
         )
-        self._attr_unique_id = f"{meter_uid}-{key}"
+        self._attr_unique_id = f"{meter_uid}-{description.key}"


 class OhmpilotSensor(_FroniusSensorEntity):
     """Defines a Fronius Ohmpilot sensor entity."""

-    entity_descriptions = OHMPILOT_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusOhmpilotUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius meter sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         device_data = self._device_data()

         self._attr_device_info = DeviceInfo(
@@ -771,45 +801,41 @@ class OhmpilotSensor(_FroniusSensorEntity):
             sw_version=device_data["software"]["value"],
             via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id),
         )
-        self._attr_unique_id = f'{device_data["serial"]["value"]}-{key}'
+        self._attr_unique_id = f'{device_data["serial"]["value"]}-{description.key}'


 class PowerFlowSensor(_FroniusSensorEntity):
     """Defines a Fronius power flow sensor entity."""

-    entity_descriptions = POWER_FLOW_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusPowerFlowUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
         solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius power flow sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         # SolarNet device is already created in FroniusSolarNet._create_solar_net_device
         self._attr_device_info = coordinator.solar_net.system_device_info
         self._attr_unique_id = (
-            f"{coordinator.solar_net.solar_net_device_id}-power_flow-{key}"
+            f"{coordinator.solar_net.solar_net_device_id}-power_flow-{description.key}"
         )


 class StorageSensor(_FroniusSensorEntity):
     """Defines a Fronius storage device sensor entity."""

-    entity_descriptions = STORAGE_ENTITY_DESCRIPTIONS
-
     def __init__(
         self,
         coordinator: FroniusStorageUpdateCoordinator,
-        key: str,
+        description: FroniusSensorEntityDescription,
        solar_net_id: str,
     ) -> None:
         """Set up an individual Fronius storage sensor."""
-        super().__init__(coordinator, key, solar_net_id)
+        super().__init__(coordinator, description, solar_net_id)
         storage_data = self._device_data()

-        self._attr_unique_id = f'{storage_data["serial"]["value"]}-{key}'
+        self._attr_unique_id = f'{storage_data["serial"]["value"]}-{description.key}'
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, storage_data["serial"]["value"])},
             manufacturer=storage_data["manufacturer"]["value"],
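
With response_key and value_fn on the description, one raw datapoint (for example status_code) can feed several entities: the raw code and a human-readable enum derived from it. A compact sketch mirroring the value-resolution order of _get_entity_value above (dataclass and dict shapes simplified):

# Resolution order mirrored from _get_entity_value: default for missing
# values, falsy-guard, optional value_fn, then float rounding.
from dataclasses import dataclass
from typing import Any, Callable


@dataclass
class Desc:
    key: str
    response_key: str | None = None
    default_value: Any = None
    invalid_when_falsy: bool = False
    value_fn: Callable[[Any], Any] | None = None


def get_entity_value(data: dict, desc: Desc) -> Any:
    new_value = data[desc.response_key or desc.key]["value"]
    if new_value is None:
        return desc.default_value
    if desc.invalid_when_falsy and not new_value:
        return None
    if desc.value_fn is not None:
        return desc.value_fn(new_value)
    if isinstance(new_value, float):
        return round(new_value, 4)
    return new_value


data = {"status_code": {"value": 7}}
msg = Desc("status_message", response_key="status_code", value_fn=lambda c: f"code-{c}")
assert get_entity_value(data, msg) == "code-7"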

@@ -66,6 +66,21 @@
       "status_code": {
         "name": "Status code"
       },
+      "status_message": {
+        "name": "Status message",
+        "state": {
+          "startup": "Startup",
+          "running": "Running",
+          "standby": "Standby",
+          "bootloading": "Bootloading",
+          "error": "Error",
+          "idle": "Idle",
+          "ready": "Ready",
+          "sleeping": "Sleeping",
+          "unknown": "Unknown",
+          "invalid": "Invalid"
+        }
+      },
       "led_state": {
         "name": "LED state"
       },
@@ -114,6 +129,16 @@
       "meter_location": {
         "name": "Meter location"
       },
+      "meter_location_description": {
+        "name": "Meter location description",
+        "state": {
+          "feed_in": "Grid interconnection point",
+          "consumption_path": "Consumption path",
+          "external_generator": "External generator",
+          "external_battery": "External battery",
+          "subload": "Subload"
+        }
+      },
       "power_apparent_phase_1": {
         "name": "Apparent power phase 1"
       },
@@ -193,7 +218,15 @@
         "name": "State code"
       },
       "state_message": {
-        "name": "State message"
+        "name": "State message",
+        "state": {
+          "up_and_running": "Up and running",
+          "keep_minimum_temperature": "Keep minimum temperature",
+          "legionella_protection": "Legionella protection",
+          "critical_fault": "Critical fault",
+          "fault": "Fault",
+          "boost_mode": "Boost mode"
+        }
       },
       "meter_mode": {
         "name": "Meter mode"

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["dacite", "gios"],
   "quality_scale": "platinum",
-  "requirements": ["gios==3.2.1"]
+  "requirements": ["gios==3.2.2"]
 }

@@ -79,12 +79,12 @@ _ICONS: dict[SensorKind, str] = {
 class GoodweSensorEntityDescription(SensorEntityDescription):
     """Class describing Goodwe sensor entities."""

-    value: Callable[
-        [GoodweUpdateCoordinator, str], Any
-    ] = lambda coordinator, sensor: coordinator.sensor_value(sensor)
-    available: Callable[
-        [GoodweUpdateCoordinator], bool
-    ] = lambda coordinator: coordinator.last_update_success
+    value: Callable[[GoodweUpdateCoordinator, str], Any] = (
+        lambda coordinator, sensor: coordinator.sensor_value(sensor)
+    )
+    available: Callable[[GoodweUpdateCoordinator], bool] = (
+        lambda coordinator: coordinator.last_update_success
+    )


 _DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = {
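
This hunk shows ruff-format's preferred layout for callable defaults: the annotation stays on one line and the lambda is wrapped in parentheses. A small runnable sketch of lambda defaults on an entity-description-like dataclass (names and shapes are illustrative):

from dataclasses import dataclass
from typing import Any, Callable


@dataclass(frozen=True)
class SensorDescription:
    key: str
    # Parenthesized lambda defaults, formatted the way ruff-format
    # lays them out in the hunk above.
    value: Callable[[dict, str], Any] = (
        lambda data, sensor: data.get(sensor)
    )
    available: Callable[[dict], bool] = (
        lambda data: bool(data)
    )


desc = SensorDescription(key="ppv")
assert desc.value({"ppv": 3200}, "ppv") == 3200
assert desc.available({"ppv": 3200})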

@@ -59,7 +59,11 @@ LOCAL_SDK_MIN_VERSION = AwesomeVersion("2.1.5")
 @callback
 def _get_registry_entries(
     hass: HomeAssistant, entity_id: str
-) -> tuple[er.RegistryEntry | None, dr.DeviceEntry | None, ar.AreaEntry | None,]:
+) -> tuple[
+    er.RegistryEntry | None,
+    dr.DeviceEntry | None,
+    ar.AreaEntry | None,
+]:
     """Get registry entries."""
     ent_reg = er.async_get(hass)
     dev_reg = dr.async_get(hass)

@@ -93,7 +93,8 @@ class GoogleTaskTodoListEntity(
             summary=item["title"],
             uid=item["id"],
             status=TODO_STATUS_MAP.get(
-                item.get("status"), TodoItemStatus.NEEDS_ACTION  # type: ignore[arg-type]
+                item.get("status"),  # type: ignore[arg-type]
+                TodoItemStatus.NEEDS_ACTION,
             ),
         )
         for item in _order_tasks(self.coordinator.data)

@@ -195,9 +195,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool:  # noqa: C901

     loop = (
         # Create own thread if more than 1 CPU
-        hass.loop
-        if multiprocessing.cpu_count() < 2
-        else None
+        hass.loop if multiprocessing.cpu_count() < 2 else None
     )
     host = base_config[DOMAIN].get(CONF_HOST)
     display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)

@@ -124,12 +124,15 @@ class Fan(HomeAccessory):
                 ),
             )

+            setter_callback = (
+                lambda value, preset_mode=preset_mode: self.set_preset_mode(
+                    value, preset_mode
+                )
+            )
             self.preset_mode_chars[preset_mode] = preset_serv.configure_char(
                 CHAR_ON,
                 value=False,
-                setter_callback=lambda value, preset_mode=preset_mode: self.set_preset_mode(
-                    value, preset_mode
-                ),
+                setter_callback=setter_callback,
             )

         if CHAR_SWING_MODE in self.chars:
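
The lambda is only hoisted into a local here; the important part it preserves is the preset_mode=preset_mode default argument, the standard fix for late binding of loop variables in closures. A tiny self-contained demonstration of why that default matters:

# Late-binding pitfall the preset_mode=preset_mode default avoids:
callbacks_late = [lambda: mode for mode in ("auto", "turbo")]
callbacks_bound = [lambda mode=mode: mode for mode in ("auto", "turbo")]

# Every late-bound lambda sees the loop variable's final value ...
assert [cb() for cb in callbacks_late] == ["turbo", "turbo"]
# ... while a default argument snapshots it per iteration.
assert [cb() for cb in callbacks_bound] == ["auto", "turbo"]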

@@ -8,7 +8,7 @@
   "quality_scale": "internal",
   "requirements": [
     "aiohttp_cors==0.7.0",
-    "aiohttp-fast-url-dispatcher==0.1.0",
+    "aiohttp-fast-url-dispatcher==0.3.0",
     "aiohttp-zlib-ng==0.1.1"
   ]
 }

@@ -35,6 +35,7 @@ from homeassistant.const import (
     CONF_RECIPIENT,
     CONF_URL,
     CONF_USERNAME,
+    CONF_VERIFY_SSL,
     EVENT_HOMEASSISTANT_STOP,
     Platform,
 )
@@ -89,7 +90,7 @@ from .const import (
     SERVICE_SUSPEND_INTEGRATION,
     UPDATE_SIGNAL,
 )
-from .utils import get_device_macs
+from .utils import get_device_macs, non_verifying_requests_session

 _LOGGER = logging.getLogger(__name__)

@@ -335,16 +336,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     def _connect() -> Connection:
         """Set up a connection."""
+        kwargs: dict[str, Any] = {
+            "timeout": CONNECTION_TIMEOUT,
+        }
+        if url.startswith("https://") and not entry.data.get(CONF_VERIFY_SSL):
+            kwargs["requests_session"] = non_verifying_requests_session(url)
         if entry.options.get(CONF_UNAUTHENTICATED_MODE):
             _LOGGER.debug("Connecting in unauthenticated mode, reduced feature set")
-            connection = Connection(url, timeout=CONNECTION_TIMEOUT)
+            connection = Connection(url, **kwargs)
         else:
             _LOGGER.debug("Connecting in authenticated mode, full feature set")
             username = entry.data.get(CONF_USERNAME) or ""
             password = entry.data.get(CONF_PASSWORD) or ""
-            connection = Connection(
-                url, username=username, password=password, timeout=CONNECTION_TIMEOUT
-            )
+            connection = Connection(url, username=username, password=password, **kwargs)
         return connection

     try:

@@ -16,7 +16,7 @@ from huawei_lte_api.exceptions import (
     ResponseErrorException,
 )
 from huawei_lte_api.Session import GetResponseType
-from requests.exceptions import Timeout
+from requests.exceptions import SSLError, Timeout
 from url_normalize import url_normalize
 import voluptuous as vol

@@ -29,6 +29,7 @@ from homeassistant.const import (
     CONF_RECIPIENT,
     CONF_URL,
     CONF_USERNAME,
+    CONF_VERIFY_SSL,
 )
 from homeassistant.core import callback
 from homeassistant.data_entry_flow import FlowResult
@@ -44,7 +45,7 @@ from .const import (
     DEFAULT_UNAUTHENTICATED_MODE,
     DOMAIN,
 )
-from .utils import get_device_macs
+from .utils import get_device_macs, non_verifying_requests_session

 _LOGGER = logging.getLogger(__name__)

@@ -80,6 +81,13 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
                         self.context.get(CONF_URL, ""),
                     ),
                 ): str,
+                vol.Optional(
+                    CONF_VERIFY_SSL,
+                    default=user_input.get(
+                        CONF_VERIFY_SSL,
+                        False,
+                    ),
+                ): bool,
                 vol.Optional(
                     CONF_USERNAME, default=user_input.get(CONF_USERNAME) or ""
                 ): str,
@@ -119,11 +127,20 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         password = user_input.get(CONF_PASSWORD) or ""

         def _get_connection() -> Connection:
+            if (
+                user_input[CONF_URL].startswith("https://")
+                and not user_input[CONF_VERIFY_SSL]
+            ):
+                requests_session = non_verifying_requests_session(user_input[CONF_URL])
+            else:
+                requests_session = None
+
             return Connection(
                 url=user_input[CONF_URL],
                 username=username,
                 password=password,
                 timeout=CONNECTION_TIMEOUT,
+                requests_session=requests_session,
             )

         conn = None
@@ -140,6 +157,12 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         except ResponseErrorException:
             _LOGGER.warning("Response error", exc_info=True)
             errors["base"] = "response_error"
+        except SSLError:
+            _LOGGER.warning("SSL error", exc_info=True)
+            if user_input[CONF_VERIFY_SSL]:
+                errors[CONF_URL] = "ssl_error_try_unverified"
+            else:
+                errors[CONF_URL] = "ssl_error_try_plain"
         except Timeout:
             _LOGGER.warning("Connection timeout", exc_info=True)
             errors[CONF_URL] = "connection_timeout"
@@ -152,6 +175,7 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         def _disconnect(conn: Connection) -> None:
             try:
                 conn.close()
+                conn.requests_session.close()
             except Exception:  # pylint: disable=broad-except
                 _LOGGER.debug("Disconnect error", exc_info=True)

@@ -14,6 +14,8 @@
       "invalid_url": "Invalid URL",
       "login_attempts_exceeded": "Maximum login attempts exceeded, please try again later",
       "response_error": "Unknown error from device",
+      "ssl_error_try_plain": "HTTPS error, please try a plain HTTP URL",
+      "ssl_error_try_unverified": "HTTPS error, please try disabling certificate verification or a plain HTTP URL",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "flow_title": "{name}",
@@ -30,7 +32,8 @@
         "data": {
           "password": "[%key:common::config_flow::data::password%]",
           "url": "[%key:common::config_flow::data::url%]",
-          "username": "[%key:common::config_flow::data::username%]"
+          "username": "[%key:common::config_flow::data::username%]",
+          "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
         },
         "description": "Enter device access details.",
         "title": "Configure Huawei LTE"

@@ -2,8 +2,13 @@
 from __future__ import annotations

 from contextlib import suppress
+import re
+from urllib.parse import urlparse
+import warnings

 from huawei_lte_api.Session import GetResponseType
+import requests
+from urllib3.exceptions import InsecureRequestWarning

 from homeassistant.helpers.device_registry import format_mac

@@ -25,3 +30,18 @@ def get_device_macs(
         macs.extend(x.get("WifiMac") for x in wlan_settings["Ssids"]["Ssid"])

     return sorted({format_mac(str(x)) for x in macs if x})
+
+
+def non_verifying_requests_session(url: str) -> requests.Session:
+    """Get requests.Session that does not verify HTTPS, filter warnings about it."""
+    parsed_url = urlparse(url)
+    assert parsed_url.hostname
+    requests_session = requests.Session()
+    requests_session.verify = False
+    warnings.filterwarnings(
+        "ignore",
+        message=rf"^.*\b{re.escape(parsed_url.hostname)}\b",
+        category=InsecureRequestWarning,
+        module=r"^urllib3\.connectionpool$",
+    )
+    return requests_session
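
The helper scopes the InsecureRequestWarning filter to the one host being contacted, via the message regex, instead of muting urllib3 warnings globally. Hypothetical usage of the function defined above (the router URL is illustrative and the request only succeeds against a reachable device):

session = non_verifying_requests_session("https://192.168.8.1/")
# verify=False comes from the session itself, and only urllib3
# warnings mentioning 192.168.8.1 are suppressed; warnings about
# other hosts still surface.
response = session.get("https://192.168.8.1/api/device/information", timeout=10)
print(response.status_code)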

@@ -116,5 +116,6 @@ class PowerViewSelect(ShadeEntity, SelectEntity):
     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
         await self.entity_description.select_fn(self._shade, option)
-        await self._shade.refresh()  # force update data to ensure new info is in coordinator
+        # force update data to ensure new info is in coordinator
+        await self._shade.refresh()
         self.async_write_ha_state()

@@ -66,8 +66,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
         coordinator: ImapPushDataUpdateCoordinator | ImapPollingDataUpdateCoordinator = hass.data[
             DOMAIN
-        ].pop(
-            entry.entry_id
-        )
+        ].pop(entry.entry_id)
         await coordinator.shutdown()
         return unload_ok

@@ -259,7 +259,8 @@ class KrakenSensor(
             return
         try:
             self._attr_native_value = self.entity_description.value_fn(
-                self.coordinator, self.tracked_asset_pair_wsname  # type: ignore[arg-type]
+                self.coordinator,  # type: ignore[arg-type]
+                self.tracked_asset_pair_wsname,
             )
             self._received_data_at_least_once = True
         except KeyError:

@@ -316,7 +316,9 @@ class HeatMeterSensor(
         """Set up the sensor with the initial values."""
         super().__init__(coordinator)
         self.key = description.key
-        self._attr_unique_id = f"{coordinator.config_entry.data['device_number']}_{description.key}"  # type: ignore[union-attr]
+        self._attr_unique_id = (
+            f"{coordinator.config_entry.data['device_number']}_{description.key}"  # type: ignore[union-attr]
+        )
         self._attr_name = f"Heat Meter {description.name}"
         self.entity_description = description
         self._attr_device_info = device

@@ -118,7 +118,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     push_coordinator = LookinPushCoordinator(entry.title)

     if lookin_device.model >= 2:
-        meteo_coordinator = LookinDataUpdateCoordinator[MeteoSensor](
+        coordinator_class = LookinDataUpdateCoordinator[MeteoSensor]
+        meteo_coordinator = coordinator_class(
             hass,
             push_coordinator,
             name=entry.title,

@@ -348,7 +348,10 @@ class MatrixBot:
         self._access_tokens[self._mx_id] = token

         await self.hass.async_add_executor_job(
-            save_json, self._session_filepath, self._access_tokens, True  # private=True
+            save_json,
+            self._session_filepath,
+            self._access_tokens,
+            True,  # private=True
         )

     async def _login(self) -> None:

@@ -104,9 +104,11 @@ class MatterEventEntity(MatterEntity, EventEntity):
         """Call when Node attribute(s) changed."""

     @callback
-    def _on_matter_node_event(
-        self, event: EventType, data: MatterNodeEvent
-    ) -> None:  # noqa: F821
+    def _on_matter_node_event(  # noqa: F821
+        self,
+        event: EventType,
+        data: MatterNodeEvent,
+    ) -> None:
         """Call on NodeEvent."""
         if data.endpoint_id != self._endpoint.endpoint_id:
             return

@@ -1137,8 +1137,7 @@ class MediaPlayerImageView(HomeAssistantView):
     extra_urls = [
         # Need to modify the default regex for media_content_id as it may
         # include arbitrary characters including '/','{', or '}'
-        url
-        + "/browse_media/{media_content_type}/{media_content_id:.+}",
+        url + "/browse_media/{media_content_type}/{media_content_id:.+}",
     ]

     def __init__(self, component: EntityComponent[MediaPlayerEntity]) -> None:
@@ -1,11 +1,13 @@
|
||||
"""Support to interact with a Music Player Daemon."""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
import asyncio
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
from datetime import timedelta
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
from socket import gaierror
|
||||
from typing import Any
|
||||
|
||||
import mpd
|
||||
@@ -92,11 +94,11 @@ class MpdDevice(MediaPlayerEntity):
|
||||
self._name = name
|
||||
self.password = password
|
||||
|
||||
self._status = None
|
||||
self._status = {}
|
||||
self._currentsong = None
|
||||
self._playlists = None
|
||||
self._currentplaylist = None
|
||||
self._is_connected = False
|
||||
self._is_available = None
|
||||
self._muted = False
|
||||
self._muted_volume = None
|
||||
self._media_position_updated_at = None
|
||||
@@ -104,34 +106,66 @@ class MpdDevice(MediaPlayerEntity):
|
||||
self._media_image_hash = None
|
||||
# Track if the song changed so image doesn't have to be loaded every update.
|
||||
self._media_image_file = None
|
||||
self._commands = None
|
||||
|
||||
# set up MPD client
|
||||
self._client = MPDClient()
|
||||
self._client.timeout = 30
|
||||
self._client.idletimeout = None
|
||||
self._client.idletimeout = 10
|
||||
self._client_lock = asyncio.Lock()
|
||||
|
||||
async def _connect(self):
|
||||
"""Connect to MPD."""
|
||||
# Instead of relying on python-mpd2 to maintain a (persistent) connection to
|
||||
# MPD, the below explicitly sets up a *non*-persistent connection. This is
|
||||
# done to workaround the issue as described in:
|
||||
# <https://github.com/Mic92/python-mpd2/issues/31>
|
||||
@asynccontextmanager
|
||||
async def connection(self):
|
||||
"""Handle MPD connect and disconnect."""
|
||||
async with self._client_lock:
|
||||
try:
|
||||
# MPDClient.connect() doesn't always respect its timeout. To
|
||||
# prevent a deadlock, enforce an additional (slightly longer)
|
||||
# timeout on the coroutine itself.
|
||||
try:
|
||||
async with asyncio.timeout(self._client.timeout + 5):
|
||||
await self._client.connect(self.server, self.port)
|
||||
|
||||
except asyncio.TimeoutError as error:
|
||||
# TimeoutError has no message (which hinders logging further
|
||||
# down the line), so provide one.
|
||||
raise asyncio.TimeoutError(
|
||||
"Connection attempt timed out"
|
||||
) from error
|
||||
if self.password is not None:
|
||||
await self._client.password(self.password)
|
||||
except mpd.ConnectionError:
|
||||
return
|
||||
|
||||
self._is_connected = True
|
||||
|
||||
def _disconnect(self):
|
||||
"""Disconnect from MPD."""
|
||||
self._is_available = True
|
||||
yield
|
||||
except (
|
||||
asyncio.TimeoutError,
|
||||
gaierror,
|
||||
mpd.ConnectionError,
|
||||
OSError,
|
||||
) as error:
|
||||
# Log a warning during startup or when previously connected; for
|
||||
# subsequent errors a debug message is sufficient.
|
||||
log_level = logging.DEBUG
|
||||
if self._is_available is not False:
|
||||
log_level = logging.WARNING
|
||||
_LOGGER.log(
|
||||
log_level, "Error connecting to '%s': %s", self.server, error
|
||||
)
|
||||
self._is_available = False
|
||||
self._status = {}
|
||||
# Also yield on failure. Handling mpd.ConnectionErrors caused by
|
||||
# attempting to control a disconnected client is the
|
||||
# responsibility of the caller.
|
||||
yield
|
||||
finally:
|
||||
with suppress(mpd.ConnectionError):
|
||||
self._client.disconnect()
|
||||
self._is_connected = False
|
||||
self._status = None
|
||||
|
||||
async def _fetch_status(self):
|
||||
"""Fetch status from MPD."""
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest data from MPD and update the state."""
|
||||
async with self.connection():
|
||||
try:
|
||||
self._status = await self._client.status()
|
||||
self._currentsong = await self._client.currentsong()
|
||||
await self._async_update_media_image_hash()
|
||||
@@ -147,24 +181,13 @@ class MpdDevice(MediaPlayerEntity):
self._media_position = int(float(position))

await self._update_playlists()
except (mpd.ConnectionError, ValueError) as error:
_LOGGER.debug("Error updating status: %s", error)

@property
def available(self):
def available(self) -> bool:
"""Return true if MPD is available and connected."""
return self._is_connected

async def async_update(self) -> None:
"""Get the latest data and update the state."""
try:
if not self._is_connected:
await self._connect()
self._commands = list(await self._client.commands())

await self._fetch_status()
except (mpd.ConnectionError, OSError, ValueError) as error:
# Cleanly disconnect in case connection is not in valid state
_LOGGER.debug("Error updating status: %s", error)
self._disconnect()
return self._is_available is True

@property
def name(self):
@@ -174,13 +197,13 @@ class MpdDevice(MediaPlayerEntity):
@property
def state(self) -> MediaPlayerState:
"""Return the media state."""
if self._status is None:
if not self._status:
return MediaPlayerState.OFF
if self._status["state"] == "play":
if self._status.get("state") == "play":
return MediaPlayerState.PLAYING
if self._status["state"] == "pause":
if self._status.get("state") == "pause":
return MediaPlayerState.PAUSED
if self._status["state"] == "stop":
if self._status.get("state") == "stop":
return MediaPlayerState.OFF

return MediaPlayerState.OFF
@@ -259,8 +282,11 @@ class MpdDevice(MediaPlayerEntity):

async def async_get_media_image(self) -> tuple[bytes | None, str | None]:
"""Fetch media image of current playing track."""
if not (file := self._currentsong.get("file")):
async with self.connection():
if self._currentsong is None or not (file := self._currentsong.get("file")):
return None, None

with suppress(mpd.ConnectionError):
response = await self._async_get_file_image_response(file)
if response is None:
return None, None
@@ -273,6 +299,9 @@ class MpdDevice(MediaPlayerEntity):

async def _async_update_media_image_hash(self):
"""Update the hash value for the media image."""
if self._currentsong is None:
return

file = self._currentsong.get("file")

if file == self._media_image_file:
@@ -295,15 +324,20 @@ class MpdDevice(MediaPlayerEntity):
self._media_image_file = file

async def _async_get_file_image_response(self, file):
# not all MPD implementations and versions support the `albumart` and `fetchpicture` commands
can_albumart = "albumart" in self._commands
can_readpicture = "readpicture" in self._commands
# not all MPD implementations and versions support the `albumart` and
# `fetchpicture` commands.
commands = []
with suppress(mpd.ConnectionError):
commands = list(await self._client.commands())
can_albumart = "albumart" in commands
can_readpicture = "readpicture" in commands

response = None

# read artwork embedded into the media file
if can_readpicture:
try:
with suppress(mpd.ConnectionError):
response = await self._client.readpicture(file)
except mpd.CommandError as error:
if error.errno is not mpd.FailureResponseCode.NO_EXIST:
@@ -315,6 +349,7 @@ class MpdDevice(MediaPlayerEntity):
# read artwork contained in the media directory (cover.{jpg,png,tiff,bmp}) if none is embedded
if can_albumart and not response:
try:
with suppress(mpd.ConnectionError):
response = await self._client.albumart(file)
except mpd.CommandError as error:
if error.errno is not mpd.FailureResponseCode.NO_EXIST:
@@ -339,7 +374,7 @@ class MpdDevice(MediaPlayerEntity):
@property
def supported_features(self) -> MediaPlayerEntityFeature:
"""Flag media player features that are supported."""
if self._status is None:
if not self._status:
return MediaPlayerEntityFeature(0)

supported = SUPPORT_MPD
@@ -373,6 +408,7 @@ class MpdDevice(MediaPlayerEntity):
"""Update available MPD playlists."""
try:
self._playlists = []
with suppress(mpd.ConnectionError):
for playlist_data in await self._client.listplaylists():
self._playlists.append(playlist_data["playlist"])
except mpd.CommandError as error:
@@ -381,11 +417,13 @@ class MpdDevice(MediaPlayerEntity):

async def async_set_volume_level(self, volume: float) -> None:
"""Set volume of media player."""
async with self.connection():
if "volume" in self._status:
await self._client.setvol(int(volume * 100))

async def async_volume_up(self) -> None:
"""Service to send the MPD the command for volume up."""
async with self.connection():
if "volume" in self._status:
current_volume = int(self._status["volume"])

@@ -394,6 +432,7 @@ class MpdDevice(MediaPlayerEntity):

async def async_volume_down(self) -> None:
"""Service to send the MPD the command for volume down."""
async with self.connection():
if "volume" in self._status:
current_volume = int(self._status["volume"])

@@ -402,25 +441,30 @@ class MpdDevice(MediaPlayerEntity):

async def async_media_play(self) -> None:
"""Service to send the MPD the command for play/pause."""
if self._status["state"] == "pause":
async with self.connection():
if self._status.get("state") == "pause":
await self._client.pause(0)
else:
await self._client.play()

async def async_media_pause(self) -> None:
"""Service to send the MPD the command for play/pause."""
async with self.connection():
await self._client.pause(1)

async def async_media_stop(self) -> None:
"""Service to send the MPD the command for stop."""
async with self.connection():
await self._client.stop()

async def async_media_next_track(self) -> None:
"""Service to send the MPD the command for next track."""
async with self.connection():
await self._client.next()

async def async_media_previous_track(self) -> None:
"""Service to send the MPD the command for previous track."""
async with self.connection():
await self._client.previous()

async def async_mute_volume(self, mute: bool) -> None:
@@ -437,6 +481,7 @@ class MpdDevice(MediaPlayerEntity):
self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None:
"""Send the media player the command for playing a playlist."""
async with self.connection():
if media_source.is_media_source_id(media_id):
media_type = MediaType.MUSIC
play_item = await media_source.async_resolve_media(
@@ -463,14 +508,15 @@ class MpdDevice(MediaPlayerEntity):
@property
def repeat(self) -> RepeatMode:
"""Return current repeat mode."""
if self._status["repeat"] == "1":
if self._status["single"] == "1":
if self._status.get("repeat") == "1":
if self._status.get("single") == "1":
return RepeatMode.ONE
return RepeatMode.ALL
return RepeatMode.OFF

async def async_set_repeat(self, repeat: RepeatMode) -> None:
"""Set repeat mode."""
async with self.connection():
if repeat == RepeatMode.OFF:
await self._client.repeat(0)
await self._client.single(0)
@@ -484,27 +530,32 @@ class MpdDevice(MediaPlayerEntity):
@property
def shuffle(self):
"""Boolean if shuffle is enabled."""
return bool(int(self._status["random"]))
return bool(int(self._status.get("random")))

async def async_set_shuffle(self, shuffle: bool) -> None:
"""Enable/disable shuffle mode."""
async with self.connection():
await self._client.random(int(shuffle))

async def async_turn_off(self) -> None:
"""Service to send the MPD the command to stop playing."""
async with self.connection():
await self._client.stop()

async def async_turn_on(self) -> None:
"""Service to send the MPD the command to start playing."""
async with self.connection():
await self._client.play()
await self._update_playlists(no_throttle=True)

async def async_clear_playlist(self) -> None:
"""Clear players playlist."""
async with self.connection():
await self._client.clear()

async def async_media_seek(self, position: float) -> None:
"""Send seek command."""
async with self.connection():
await self._client.seekcur(position)

async def async_browse_media(
@@ -513,8 +564,11 @@ class MpdDevice(MediaPlayerEntity):
media_content_id: str | None = None,
) -> BrowseMedia:
"""Implement the websocket media browsing helper."""
async with self.connection():
return await media_source.async_browse_media(
self.hass,
media_content_id,
content_filter=lambda item: item.media_content_type.startswith("audio/"),
content_filter=lambda item: item.media_content_type.startswith(
"audio/"
),
)

@@ -470,9 +470,10 @@ class MqttTemperatureControlEntity(MqttEntity, ABC):
except ValueError:
_LOGGER.error("Could not parse %s from %s", template_name, payload)

def prepare_subscribe_topics(
self, topics: dict[str, dict[str, Any]]
) -> None: # noqa: C901
def prepare_subscribe_topics( # noqa: C901
self,
topics: dict[str, dict[str, Any]],
) -> None:
"""(Re)Subscribe to topics."""

@callback

@@ -63,9 +63,8 @@ async def async_wait_for_mqtt_client(hass: HomeAssistant) -> bool:

state_reached_future: asyncio.Future[bool]
if DATA_MQTT_AVAILABLE not in hass.data:
hass.data[
DATA_MQTT_AVAILABLE
] = state_reached_future = hass.loop.create_future()
state_reached_future = hass.loop.create_future()
hass.data[DATA_MQTT_AVAILABLE] = state_reached_future
else:
state_reached_future = hass.data[DATA_MQTT_AVAILABLE]
if state_reached_future.done():

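The hunk above only reflows the assignment, but the underlying "shared readiness future" idea is worth spelling out: the first caller creates one future in shared state, later callers await the same one, and whoever observes the client coming up resolves it for everybody. A minimal sketch under those assumptions (a plain dict stands in for hass.data; this is not the real MQTT code):

import asyncio

DATA_AVAILABLE = "mqtt_available"


async def wait_for_client(data: dict, timeout: float = 1.0) -> bool:
    # Create the shared future on first use; later callers await the same one.
    if DATA_AVAILABLE not in data:
        state_reached_future = asyncio.get_running_loop().create_future()
        data[DATA_AVAILABLE] = state_reached_future
    else:
        state_reached_future = data[DATA_AVAILABLE]
    if state_reached_future.done():
        return state_reached_future.result()
    try:
        # shield() keeps the shared future alive if this waiter times out.
        return await asyncio.wait_for(asyncio.shield(state_reached_future), timeout)
    except asyncio.TimeoutError:
        return False


async def main() -> None:
    data: dict = {}
    waiter = asyncio.ensure_future(wait_for_client(data))
    await asyncio.sleep(0)  # let the waiter install the shared future
    data[DATA_AVAILABLE].set_result(True)  # broker became available
    print(await waiter)  # True


asyncio.run(main())
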
@@ -34,9 +34,9 @@ UNIT_OF_LOAD: Final[str] = "load"
class NextcloudSensorEntityDescription(SensorEntityDescription):
"""Describes Nextcloud sensor entity."""

value_fn: Callable[
[str | int | float], str | int | float | datetime
] = lambda value: value
value_fn: Callable[[str | int | float], str | int | float | datetime] = (
lambda value: value
)


SENSORS: Final[list[NextcloudSensorEntityDescription]] = [

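This hunk, like several later ones (VeSync, Vodafone Station), only changes how a callable default on a dataclass field is laid out: ruff-format parenthesizes the long right-hand side instead of splitting the type annotation. A tiny standalone illustration of the construct (names are invented for the example):

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    # Parenthesized right-hand side is how ruff-format lays out long defaults.
    value_fn: Callable[[float], float] = (
        lambda value: value
    )


desc = SensorDescription(key="load", value_fn=lambda v: v / 100)
print(desc.value_fn(250))  # 2.5
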
@@ -39,7 +39,7 @@ from homeassistant.helpers.update_coordinator import (
UpdateFailed,
)

from .const import DEFAULT_TIMEOUT, DOMAIN, ERROR_STATES
from .const import CONF_ENCRYPT_TOKEN, DEFAULT_TIMEOUT, DOMAIN, ERROR_STATES
from .helpers import NukiWebhookException, parse_id

_NukiDeviceT = TypeVar("_NukiDeviceT", bound=NukiDevice)
@@ -188,7 +188,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.data[CONF_HOST],
entry.data[CONF_TOKEN],
entry.data[CONF_PORT],
True,
entry.data.get(CONF_ENCRYPT_TOKEN, True),
DEFAULT_TIMEOUT,
)

@@ -13,7 +13,7 @@ from homeassistant.components import dhcp
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN
from homeassistant.data_entry_flow import FlowResult

from .const import DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN
from .const import CONF_ENCRYPT_TOKEN, DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN
from .helpers import CannotConnect, InvalidAuth, parse_id

_LOGGER = logging.getLogger(__name__)
@@ -26,7 +26,12 @@ USER_SCHEMA = vol.Schema(
}
)

REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_TOKEN): str})
REAUTH_SCHEMA = vol.Schema(
{
vol.Required(CONF_TOKEN): str,
vol.Optional(CONF_ENCRYPT_TOKEN, default=True): bool,
}
)


async def validate_input(hass, data):
@@ -41,7 +46,7 @@ async def validate_input(hass, data):
data[CONF_HOST],
data[CONF_TOKEN],
data[CONF_PORT],
True,
data.get(CONF_ENCRYPT_TOKEN, True),
DEFAULT_TIMEOUT,
)

@@ -100,6 +105,7 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
CONF_HOST: self._data[CONF_HOST],
CONF_PORT: self._data[CONF_PORT],
CONF_TOKEN: user_input[CONF_TOKEN],
CONF_ENCRYPT_TOKEN: user_input[CONF_ENCRYPT_TOKEN],
}

try:
@@ -131,8 +137,15 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
async def async_step_validate(self, user_input=None):
"""Handle init step of a flow."""

data_schema = self.discovery_schema or USER_SCHEMA

errors = {}
if user_input is not None:
data_schema = USER_SCHEMA.extend(
{
vol.Optional(CONF_ENCRYPT_TOKEN, default=True): bool,
}
)
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
@@ -149,7 +162,8 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured()
return self.async_create_entry(title=bridge_id, data=user_input)

data_schema = self.discovery_schema or USER_SCHEMA
return self.async_show_form(
step_id="user", data_schema=data_schema, errors=errors
step_id="user",
data_schema=self.add_suggested_values_to_schema(data_schema, user_input),
errors=errors,
)

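The flow above extends the user schema with an optional encrypt-token flag. How voluptuous schema extension behaves, as a standalone sketch (keys here are illustrative strings rather than the HA CONF_* constants):

import voluptuous as vol

USER_SCHEMA = vol.Schema({vol.Required("token"): str})

# extend() returns a new schema; the original is left untouched, and the
# Optional default is filled in during validation.
data_schema = USER_SCHEMA.extend(
    {
        vol.Optional("encrypt_token", default=True): bool,
    }
)

print(data_schema({"token": "abc"}))
# {'token': 'abc', 'encrypt_token': True}
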
@@ -12,3 +12,6 @@ DEFAULT_PORT = 8080
DEFAULT_TIMEOUT = 20

ERROR_STATES = (0, 254, 255)

# Encrypt token, instead of using a plaintext token
CONF_ENCRYPT_TOKEN = "encrypt_token"

@@ -5,14 +5,16 @@
"data": {
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
"token": "[%key:common::config_flow::data::access_token%]"
"token": "[%key:common::config_flow::data::access_token%]",
"encrypt_token": "Use an encrypted token for authentication."
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "The Nuki integration needs to re-authenticate with your bridge.",
"data": {
"token": "[%key:common::config_flow::data::access_token%]"
"token": "[%key:common::config_flow::data::access_token%]",
"encrypt_token": "[%key:component::nuki::config::step::user::data::encrypt_token%]"
}
}
},

@@ -32,8 +32,7 @@ class ONVIFBaseEntity(Entity):
See: https://github.com/home-assistant/core/issues/35883
"""
return (
self.device.info.mac
or self.device.info.serial_number # type:ignore[return-value]
self.device.info.mac or self.device.info.serial_number # type:ignore[return-value]
)

@property

@@ -1,6 +1,7 @@
"""The OurGroceries coordinator."""
from __future__ import annotations

import asyncio
from datetime import timedelta
import logging

@@ -25,6 +26,7 @@ class OurGroceriesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]):
"""Initialize global OurGroceries data updater."""
self.og = og
self.lists = lists
self._ids = [sl["id"] for sl in lists]
interval = timedelta(seconds=SCAN_INTERVAL)
super().__init__(
hass,
@@ -35,7 +37,11 @@ class OurGroceriesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]):

async def _async_update_data(self) -> dict[str, dict]:
"""Fetch data from OurGroceries."""
return {
sl["id"]: (await self.og.get_list_items(list_id=sl["id"]))
for sl in self.lists
}
return dict(
zip(
self._ids,
await asyncio.gather(
*[self.og.get_list_items(list_id=id) for id in self._ids]
),
)
)

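The coordinator change above replaces per-list sequential awaits with one asyncio.gather call, so all list fetches run concurrently. A runnable sketch of the speedup with a fake fetcher (not the real OurGroceries API):

import asyncio


async def get_list_items(list_id: str) -> dict:
    await asyncio.sleep(0.1)  # simulate one network round-trip
    return {"id": list_id, "items": []}


async def fetch_all(ids: list[str]) -> dict[str, dict]:
    # Sequential awaits would take ~0.1 s per list; gather() runs the
    # requests concurrently, so the total stays ~0.1 s overall.
    results = await asyncio.gather(*(get_list_items(i) for i in ids))
    return dict(zip(ids, results))


print(asyncio.run(fetch_all(["a", "b", "c"])))
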
@@ -12,9 +12,6 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
}
}

@@ -1,6 +1,7 @@
"""A todo platform for OurGroceries."""

import asyncio
from typing import Any

from homeassistant.components.todo import (
TodoItem,
@@ -28,6 +29,12 @@ async def async_setup_entry(
)


def _completion_status(item: dict[str, Any]) -> TodoItemStatus:
if item.get("crossedOffAt", False):
return TodoItemStatus.COMPLETED
return TodoItemStatus.NEEDS_ACTION


class OurGroceriesTodoListEntity(
CoordinatorEntity[OurGroceriesDataUpdateCoordinator], TodoListEntity
):
@@ -58,12 +65,6 @@ class OurGroceriesTodoListEntity(
if self.coordinator.data is None:
self._attr_todo_items = None
else:

def _completion_status(item):
if item.get("crossedOffAt", False):
return TodoItemStatus.COMPLETED
return TodoItemStatus.NEEDS_ACTION

self._attr_todo_items = [
TodoItem(
summary=item["name"],

@@ -245,12 +245,13 @@ class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity):
MODE_CHANGE_STATE,
OverkizCommandParam.AUTO,
).lower() # Overkiz can return states that have uppercase characters which are not accepted back as commands
if hvac_mode.replace(
" ", ""
) in [ # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
if (
hvac_mode.replace(" ", "")
in [ # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
OverkizCommandParam.AUTOCOOLING,
OverkizCommandParam.AUTOHEATING,
]:
]
):
hvac_mode = OverkizCommandParam.AUTO

swing_mode = self._control_backfill(

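The comments in this hunk explain the normalization rule: Overkiz may report states like 'auto cooling' or 'autoHeating' that are not valid commands and must collapse to 'auto'. A tiny standalone sketch of that rule (plain strings instead of the OverkizCommandParam enum):

AUTO_VARIANTS = {"autocooling", "autoheating"}


def normalize_hvac_mode(hvac_mode: str) -> str:
    """Collapse variants such as 'auto cooling' or 'autoHeating' to 'auto'."""
    if hvac_mode.replace(" ", "").lower() in AUTO_VARIANTS:
        return "auto"
    return hvac_mode


print(normalize_hvac_mode("auto cooling"))  # auto
print(normalize_hvac_mode("autoHeating"))  # auto
print(normalize_hvac_mode("heating"))  # heating
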
@@ -17,10 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util

from .const import (
@@ -44,6 +41,7 @@ from .const import (
SENSOR_SELECTED_SLOT_MIN_ORDER_VALUE,
SENSOR_SELECTED_SLOT_START,
)
from .coordinator import PicnicUpdateCoordinator


@dataclass
@@ -237,7 +235,7 @@ async def async_setup_entry(
)


class PicnicSensor(SensorEntity, CoordinatorEntity):
class PicnicSensor(SensorEntity, CoordinatorEntity[PicnicUpdateCoordinator]):
"""The CoordinatorEntity subclass representing Picnic sensors."""

_attr_has_entity_name = True
@@ -246,7 +244,7 @@ class PicnicSensor(SensorEntity, CoordinatorEntity):

def __init__(
self,
coordinator: DataUpdateCoordinator[Any],
coordinator: PicnicUpdateCoordinator,
config_entry: ConfigEntry,
description: PicnicSensorEntityDescription,
) -> None:

@@ -77,8 +77,11 @@ async def handle_add_product(
)


def product_search(api_client: PicnicAPI, product_name: str) -> None | str:
def product_search(api_client: PicnicAPI, product_name: str | None) -> None | str:
"""Query the api client for the product name."""
if product_name is None:
return None

search_result = api_client.search(product_name)

if not search_result or "items" not in search_result[0]:

@@ -2,7 +2,7 @@
from __future__ import annotations

import logging
from typing import Any, cast
from typing import cast

from homeassistant.components.todo import (
TodoItem,
@@ -14,12 +14,10 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import CONF_COORDINATOR, DOMAIN
from .coordinator import PicnicUpdateCoordinator
from .services import product_search

_LOGGER = logging.getLogger(__name__)
@@ -36,7 +34,7 @@ async def async_setup_entry(
async_add_entities([PicnicCart(hass, picnic_coordinator, config_entry)])


class PicnicCart(TodoListEntity, CoordinatorEntity):
class PicnicCart(TodoListEntity, CoordinatorEntity[PicnicUpdateCoordinator]):
"""A Picnic Shopping Cart TodoListEntity."""

_attr_has_entity_name = True
@@ -47,7 +45,7 @@ class PicnicCart(TodoListEntity, CoordinatorEntity):
def __init__(
self,
hass: HomeAssistant,
coordinator: DataUpdateCoordinator[Any],
coordinator: PicnicUpdateCoordinator,
config_entry: ConfigEntry,
) -> None:
"""Initialize PicnicCart."""

@@ -83,13 +83,17 @@ SENSOR_DESCRIPTIONS = (
native_unit_of_measurement=UnitOfTime.SECONDS,
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda hass, service_info: bluetooth.async_get_learned_advertising_interval(
value_fn=(
lambda hass, service_info: (
bluetooth.async_get_learned_advertising_interval(
hass, service_info.address
)
or bluetooth.async_get_fallback_availability_interval(
hass, service_info.address
)
or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS,
or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS
)
),
suggested_display_precision=1,
),
)

@@ -10,10 +10,9 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.entity_registry import async_entries_for_config_entry

from .const import CONF_SERIAL_NUMBER
from .coordinator import RainbirdData
@@ -55,6 +54,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
format_mac(mac_address),
str(entry.data[CONF_SERIAL_NUMBER]),
)
_async_fix_device_id(
hass,
dr.async_get(hass),
entry.entry_id,
format_mac(mac_address),
str(entry.data[CONF_SERIAL_NUMBER]),
)

try:
model_info = await controller.get_model_and_version()
@@ -124,7 +130,7 @@ def _async_fix_entity_unique_id(
serial_number: str,
) -> None:
"""Migrate existing entity if current one can't be found and an old one exists."""
entity_entries = async_entries_for_config_entry(entity_registry, config_entry_id)
entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
for entity_entry in entity_entries:
unique_id = str(entity_entry.unique_id)
if unique_id.startswith(mac_address):
@@ -137,6 +143,70 @@ def _async_fix_entity_unique_id(
)


def _async_device_entry_to_keep(
old_entry: dr.DeviceEntry, new_entry: dr.DeviceEntry
) -> dr.DeviceEntry:
"""Determine which device entry to keep when there are duplicates.

As we transitioned to new unique ids, we did not update existing device entries
and as a result there are devices with both the old and new unique id format. We
have to pick which one to keep, and preferably this can repair things if the
user previously renamed devices.
"""
# Prefer the new device if the user already gave it a name or area. Otherwise,
# do the same for the old entry. If no entries have been modified then keep the new one.
if new_entry.disabled_by is None and (
new_entry.area_id is not None or new_entry.name_by_user is not None
):
return new_entry
if old_entry.disabled_by is None and (
old_entry.area_id is not None or old_entry.name_by_user is not None
):
return old_entry
return new_entry if new_entry.disabled_by is None else old_entry


def _async_fix_device_id(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
config_entry_id: str,
mac_address: str,
serial_number: str,
) -> None:
"""Migrate existing device identifiers to the new format.

This will rename any device ids that are prefixed with the serial number to be prefixed
with the mac address. This also cleans up from a bug that allowed devices to exist
in both the old and new format.
"""
device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id)
device_entry_map = {}
migrations = {}
for device_entry in device_entries:
unique_id = next(iter(device_entry.identifiers))[1]
device_entry_map[unique_id] = device_entry
if (suffix := unique_id.removeprefix(str(serial_number))) != unique_id:
migrations[unique_id] = f"{mac_address}{suffix}"

for unique_id, new_unique_id in migrations.items():
old_entry = device_entry_map[unique_id]
if (new_entry := device_entry_map.get(new_unique_id)) is not None:
# Device entries exist for both the old and new format and one must be removed
entry_to_keep = _async_device_entry_to_keep(old_entry, new_entry)
if entry_to_keep == new_entry:
_LOGGER.debug("Removing device entry %s", unique_id)
device_registry.async_remove_device(old_entry.id)
continue
# Remove new entry and update old entry to new id below
_LOGGER.debug("Removing device entry %s", new_unique_id)
device_registry.async_remove_device(new_entry.id)

_LOGGER.debug("Updating device id from %s to %s", unique_id, new_unique_id)
device_registry.async_update_device(
old_entry.id, new_identifiers={(DOMAIN, new_unique_id)}
)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""

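The docstrings above describe the core of the migration: any identifier prefixed with the serial number is renamed to the mac-address prefix. The prefix-matching step in isolation, as a runnable sketch with plain lists instead of the device registry:

def plan_migrations(unique_ids: list[str], serial: str, mac: str) -> dict[str, str]:
    """Map serial-prefixed ids to their mac-prefixed replacements."""
    migrations: dict[str, str] = {}
    for unique_id in unique_ids:
        # removeprefix() returns the id unchanged when the prefix is absent,
        # so the inequality test doubles as the "starts with serial" check.
        if (suffix := unique_id.removeprefix(serial)) != unique_id:
            migrations[unique_id] = f"{mac}{suffix}"
    return migrations


print(plan_migrations(["1234-zone1", "aa:bb:cc-zone2"], "1234", "aa:bb:cc"))
# {'1234-zone1': 'aa:bb:cc-zone1'}
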
@@ -101,9 +101,8 @@ def _validate_table_schema_has_correct_collation(

collate = (
dialect_kwargs.get("mysql_collate")
or dialect_kwargs.get(
"mariadb_collate"
) # pylint: disable-next=protected-access
or dialect_kwargs.get("mariadb_collate")
# pylint: disable-next=protected-access
or connection.dialect._fetch_setting(connection, "collation_server") # type: ignore[attr-defined]
)
if collate and collate != "utf8mb4_unicode_ci":

@@ -176,13 +176,17 @@ class NativeLargeBinary(LargeBinary):
# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
mysql.INTEGER(unsigned=True), "mysql", "mariadb" # type: ignore[no-untyped-call]
mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call]
"mysql",
"mariadb",
)
JSON_VARIANT_CAST = Text().with_variant(
postgresql.JSON(none_as_null=True), "postgresql" # type: ignore[no-untyped-call]
postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call]
"postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
postgresql.JSONB(none_as_null=True), "postgresql" # type: ignore[no-untyped-call]
postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call]
"postgresql",
)
DATETIME_TYPE = (
DateTime(timezone=True)

@@ -244,7 +244,8 @@ class Filters:
),
# Needs https://github.com/bdraco/home-assistant/commit/bba91945006a46f3a01870008eb048e4f9cbb1ef
self._generate_filter_for_columns(
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder # type: ignore[arg-type]
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), # type: ignore[arg-type]
_encoder,
).self_group(),
)

@@ -532,7 +532,9 @@ def _update_states_table_with_foreign_key_options(

states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
old_states_table = Table( # noqa: F841
TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type]
TABLE_STATES,
MetaData(),
*(alter["old_fk"] for alter in alters), # type: ignore[arg-type]
)

for alter in alters:

@@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
await host.renew()

async def async_check_firmware_update() -> str | Literal[
False
] | NewSoftwareVersion:
async def async_check_firmware_update() -> (
str | Literal[False] | NewSoftwareVersion
):
"""Check for firmware updates."""
if not host.api.supported(None, "update"):
return False

@@ -566,10 +566,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
list_of_ports = {}
for port in ports:
list_of_ports[
port.device
] = f"{port}, s/n: {port.serial_number or 'n/a'}" + (
f" - {port.manufacturer}" if port.manufacturer else ""
list_of_ports[port.device] = (
f"{port}, s/n: {port.serial_number or 'n/a'}"
+ (f" - {port.manufacturer}" if port.manufacturer else "")
)
list_of_ports[CONF_MANUAL_PATH] = CONF_MANUAL_PATH

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/ring",
"iot_class": "cloud_polling",
"loggers": ["ring_doorbell"],
"requirements": ["ring-doorbell[listen]==0.8.2"]
"requirements": ["ring-doorbell[listen]==0.8.3"]
}

@@ -27,16 +27,28 @@ async def async_get_config_entry_diagnostics(
},
"data": {
"dsl": async_redact_data(
dataclasses.asdict(await data.system.box.dsl_get_info()), TO_REDACT
dataclasses.asdict(
await data.system.box.dsl_get_info() # type:ignore [call-overload]
),
TO_REDACT,
),
"ftth": async_redact_data(
dataclasses.asdict(await data.system.box.ftth_get_info()), TO_REDACT
dataclasses.asdict(
await data.system.box.ftth_get_info() # type:ignore [call-overload]
),
TO_REDACT,
),
"system": async_redact_data(
dataclasses.asdict(await data.system.box.system_get_info()), TO_REDACT
dataclasses.asdict(
await data.system.box.system_get_info() # type:ignore [call-overload]
),
TO_REDACT,
),
"wan": async_redact_data(
dataclasses.asdict(await data.system.box.wan_get_info()), TO_REDACT
dataclasses.asdict(
await data.system.box.wan_get_info() # type:ignore [call-overload]
),
TO_REDACT,
),
},
}

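The diagnostics hunk above combines two steps: convert a dataclass to a plain dict with dataclasses.asdict, then redact sensitive keys before the result is exposed. A minimal sketch with a local stand-in for the redaction helper (the real one lives in homeassistant.components.diagnostics; the WanInfo fields here are invented):

import dataclasses
from typing import Any


@dataclasses.dataclass
class WanInfo:
    ip: str
    status: str


TO_REDACT = {"ip"}


def redact(data: dict[str, Any], to_redact: set[str]) -> dict[str, Any]:
    """Shallow stand-in for the diagnostics redaction helper."""
    return {k: "**REDACTED**" if k in to_redact else v for k, v in data.items()}


print(redact(dataclasses.asdict(WanInfo(ip="203.0.113.7", status="up")), TO_REDACT))
# {'ip': '**REDACTED**', 'status': 'up'}
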
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/sfr_box",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["sfrbox-api==0.0.6"]
"requirements": ["sfrbox-api==0.0.8"]
}

@@ -188,7 +188,7 @@ SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda x: x.temperature / 1000,
value_fn=lambda x: None if x.temperature is None else x.temperature / 1000,
),
)
WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] = (

@@ -280,9 +280,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):

async def _async_fallback_poll(self) -> None:
"""Retrieve latest state by polling."""
await self.hass.data[DATA_SONOS].favorites[
self.speaker.household_id
].async_poll()
await (
self.hass.data[DATA_SONOS].favorites[self.speaker.household_id].async_poll()
)
await self.hass.async_add_executor_job(self._update)

def _update(self) -> None:

@@ -78,7 +78,9 @@ class RecorderOutput(StreamOutput):

def write_segment(segment: Segment) -> None:
"""Write a segment to output."""
# fmt: off
nonlocal output, output_v, output_a, last_stream_id, running_duration, last_sequence
# fmt: on
# Because the stream_worker is in a different thread from the record service,
# the lookback segments may still have some overlap with the recorder segments
if segment.sequence <= last_sequence:

@@ -153,7 +153,9 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C
if not self.available:
return None
try:
return await self._api.surveillance_station.get_camera_image(self.entity_description.key, self.snapshot_quality) # type: ignore[no-any-return]
return await self._api.surveillance_station.get_camera_image( # type: ignore[no-any-return]
self.entity_description.key, self.snapshot_quality
)
except (
SynologyDSMAPIErrorException,
SynologyDSMRequestException,

@@ -57,7 +57,8 @@ from .template_entity import TemplateEntity, rewrite_common_legacy_to_modern_con
from .trigger_entity import TriggerEntity

CHECK_FORECAST_KEYS = (
set().union(Forecast.__annotations__.keys())
set()
.union(Forecast.__annotations__.keys())
# Manually add the forecast resulting attributes that only exists
# as native_* in the Forecast definition
.union(("apparent_temperature", "wind_gust_speed", "dew_point"))

@@ -119,9 +119,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle Memo Text service call."""
memo_text = call.data[CONF_MEMO_TEXT]
memo_text.hass = hass
await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].get_module(
call.data[CONF_ADDRESS]
).set_memo_text(memo_text.async_render())
await (
hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"]
.get_module(call.data[CONF_ADDRESS])
.set_memo_text(memo_text.async_render())
)

hass.services.async_register(
DOMAIN,

@@ -48,12 +48,12 @@ class VeSyncSensorEntityDescription(
):
"""Describe VeSync sensor entity."""

exists_fn: Callable[
[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool
] = lambda _: True
update_fn: Callable[
[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None
] = lambda _: None
exists_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool] = (
lambda _: True
)
update_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None] = (
lambda _: None
)


def update_energy(device):

@@ -34,6 +34,7 @@ from homeassistant.const import (
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import entity_platform
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -294,22 +295,45 @@ class ViCareClimate(ViCareEntity, ClimateEntity):

def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode and deactivate any existing programs."""
vicare_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode)
if vicare_program is None:
raise ValueError(
f"Cannot set invalid vicare program: {preset_mode}/{vicare_program}"
target_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode)
if target_program is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="program_unknown",
translation_placeholders={
"preset": preset_mode,
},
)

_LOGGER.debug("Setting preset to %s / %s", preset_mode, vicare_program)
if self._current_program != VICARE_PROGRAM_NORMAL:
_LOGGER.debug("Current preset %s", self._current_program)
if self._current_program and self._current_program != VICARE_PROGRAM_NORMAL:
# We can't deactivate "normal"
_LOGGER.debug("deactivating %s", self._current_program)
try:
self._circuit.deactivateProgram(self._current_program)
except PyViCareCommandError:
_LOGGER.debug("Unable to deactivate program %s", self._current_program)
if vicare_program != VICARE_PROGRAM_NORMAL:
# And we can't explicitly activate normal, either
self._circuit.activateProgram(vicare_program)
except PyViCareCommandError as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="program_not_deactivated",
translation_placeholders={
"program": self._current_program,
},
) from err

_LOGGER.debug("Setting preset to %s / %s", preset_mode, target_program)
if target_program != VICARE_PROGRAM_NORMAL:
# And we can't explicitly activate "normal", either
_LOGGER.debug("activating %s", target_program)
try:
self._circuit.activateProgram(target_program)
except PyViCareCommandError as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="program_not_activated",
translation_placeholders={
"program": target_program,
},
) from err

@property
def extra_state_attributes(self):

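The climate hunk above replaces bare ValueError with ServiceValidationError carrying a translation key and placeholders, which the frontend resolves against the strings.json entries added in the next hunk. A runnable sketch of the shape of that pattern, with a local stand-in class instead of homeassistant.exceptions (the preset mapping here is illustrative, not the real table):

class ServiceValidationError(Exception):
    """Local stand-in for homeassistant.exceptions.ServiceValidationError."""

    def __init__(
        self,
        *,
        translation_domain: str,
        translation_key: str,
        translation_placeholders: dict[str, str],
    ) -> None:
        super().__init__(
            f"{translation_domain}.{translation_key}: {translation_placeholders}"
        )


HA_TO_VICARE_PRESET_HEATING = {"comfort": "comfort", "eco": "eco"}
DOMAIN = "vicare"


def resolve_program(preset_mode: str) -> str:
    target_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode)
    if target_program is None:
        # The key and placeholders are resolved against strings.json below.
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="program_unknown",
            translation_placeholders={"preset": preset_mode},
        )
    return target_program


print(resolve_program("eco"))  # eco
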
@@ -288,6 +288,17 @@
}
}
},
"exceptions": {
"program_unknown": {
"message": "Cannot translate preset {preset} into a valid ViCare program"
},
"program_not_activated": {
"message": "Unable to activate ViCare program {program}"
},
"program_not_deactivated": {
"message": "Unable to deactivate ViCare program {program}"
}
},
"services": {
"set_vicare_mode": {
"name": "Set ViCare mode",

@@ -28,9 +28,9 @@ NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"]
class VodafoneStationBaseEntityDescription:
"""Vodafone Station entity base description."""

value: Callable[
[Any, Any], Any
] = lambda coordinator, key: coordinator.data.sensors[key]
value: Callable[[Any, Any], Any] = (
lambda coordinator, key: coordinator.data.sensors[key]
)
is_suitable: Callable[[dict], bool] = lambda val: True


@@ -111,11 +111,13 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol):
valid_protocol_factory=lambda call_info, rtcp_state: make_protocol(
hass, devices, call_info, rtcp_state
),
invalid_protocol_factory=lambda call_info, rtcp_state: PreRecordMessageProtocol(
invalid_protocol_factory=(
lambda call_info, rtcp_state: PreRecordMessageProtocol(
hass,
"not_configured.pcm",
opus_payload_type=call_info.opus_payload_type,
rtcp_state=rtcp_state,
)
),
)
self.hass = hass

@@ -95,9 +95,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN):
self.upnp_description = discovery_info.ssdp_location

# ssdp_location and hostname have been checked in check_yamaha_ssdp so it is safe to ignore type assignment
self.host = urlparse(
discovery_info.ssdp_location
).hostname # type: ignore[assignment]
self.host = urlparse(discovery_info.ssdp_location).hostname # type: ignore[assignment]

await self.async_set_unique_id(self.serial_number)
self._abort_if_unique_id_configured(

@@ -276,9 +276,7 @@ async def async_setup_entry(
if state_key == "0":
continue

notification_description: NotificationZWaveJSEntityDescription | None = (
None
)
notification_description: NotificationZWaveJSEntityDescription | None = None

for description in NOTIFICATION_SENSOR_MAPPINGS:
if (

@@ -344,7 +344,8 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity):
is not None
and (extra_data := await self.async_get_last_extra_data())
and (
latest_version_firmware := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
latest_version_firmware
:= ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
extra_data.as_dict()
).latest_version_firmware
)

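The restructure above keeps a walrus assignment inside a multi-clause condition. The technique in isolation, as a short sketch with invented data (not the Z-Wave JS API):

def parse_firmware(data: dict) -> str | None:
    return data.get("firmware")


stored = {"firmware": "1.2.3"}

# Binding a value inside the condition keeps the happy path to one `if`.
if stored is not None and (latest := parse_firmware(stored)) is not None:
    print(f"restored firmware {latest}")
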
@@ -251,7 +251,9 @@ def async_track_state_change(
return async_track_state_change_event(hass, entity_ids, state_change_listener)

return hass.bus.async_listen(
EVENT_STATE_CHANGED, state_change_dispatcher, event_filter=state_change_filter # type: ignore[arg-type]
EVENT_STATE_CHANGED,
state_change_dispatcher, # type: ignore[arg-type]
event_filter=state_change_filter, # type: ignore[arg-type]
)


@@ -761,7 +763,8 @@ class _TrackStateChangeFiltered:
@callback
def _setup_all_listener(self) -> None:
self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen(
EVENT_STATE_CHANGED, self._action # type: ignore[arg-type]
EVENT_STATE_CHANGED,
self._action, # type: ignore[arg-type]
)


@@ -1335,7 +1338,8 @@ def async_track_same_state(

if entity_ids == MATCH_ALL:
async_remove_state_for_cancel = hass.bus.async_listen(
EVENT_STATE_CHANGED, state_for_cancel_listener # type: ignore[arg-type]
EVENT_STATE_CHANGED,
state_for_cancel_listener, # type: ignore[arg-type]
)
else:
async_remove_state_for_cancel = async_track_state_change_event(

@@ -190,7 +190,8 @@ class RestoreStateData:
state, self.entities[state.entity_id].extra_restore_state_data, now
)
for state in all_states
if state.entity_id in self.entities and
if state.entity_id in self.entities
and
# Ignore all states that are entity registry placeholders
not state.attributes.get(ATTR_RESTORED)
]

@@ -99,8 +99,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
# Pick a random microsecond in range 0.05..0.50 to stagger the refreshes
# and avoid a thundering herd.
self._microsecond = (
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX)
/ 10**6
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6
)

self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}

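The comment above explains the purpose of the computation: a random sub-second offset spreads many coordinators' refresh ticks apart to avoid a thundering herd. A sketch of the arithmetic in isolation; the constant values here are assumed from the 0.05..0.50 s range in the comment, not read from helpers.event:

from random import randint

# Assumed values mirroring event.RANDOM_MICROSECOND_MIN / _MAX.
RANDOM_MICROSECOND_MIN = 50_000
RANDOM_MICROSECOND_MAX = 500_000

microsecond = randint(RANDOM_MICROSECOND_MIN, RANDOM_MICROSECOND_MAX) / 10**6
print(f"refresh offset: {microsecond:.6f} s")  # somewhere in 0.05..0.50
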
@@ -403,9 +403,7 @@ async def async_get_zeroconf(
hass: HomeAssistant,
) -> dict[str, list[dict[str, str | dict[str, str]]]]:
"""Return cached list of zeroconf types."""
zeroconf: dict[
str, list[dict[str, str | dict[str, str]]]
] = ZEROCONF.copy() # type: ignore[assignment]
zeroconf: dict[str, list[dict[str, str | dict[str, str]]]] = ZEROCONF.copy() # type: ignore[assignment]

integrations = await async_get_custom_components(hass)
for integration in integrations.values():
@@ -1013,9 +1011,7 @@ def _load_file(
Async friendly.
"""
with suppress(KeyError):
return hass.data[DATA_COMPONENTS][ # type: ignore[no-any-return]
comp_or_platform
]
return hass.data[DATA_COMPONENTS][comp_or_platform] # type: ignore[no-any-return]

cache = hass.data[DATA_COMPONENTS]

@@ -1,7 +1,7 @@
# Automatically generated by gen_requirements_all.py, do not edit

aiodiscover==1.5.1
aiohttp-fast-url-dispatcher==0.1.0
aiohttp-fast-url-dispatcher==0.3.0
aiohttp-zlib-ng==0.1.1
aiohttp==3.8.5;python_version<'3.12'
aiohttp==3.9.0;python_version>='3.12'

@@ -57,7 +57,8 @@ def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObject


def load_json(
filename: str | PathLike, default: JsonValueType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonValueType = _SENTINEL, # type: ignore[assignment]
) -> JsonValueType:
"""Load JSON data from a file.

@@ -79,7 +80,8 @@ def load_json(


def load_json_array(
filename: str | PathLike, default: JsonArrayType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonArrayType = _SENTINEL, # type: ignore[assignment]
) -> JsonArrayType:
"""Load JSON data from a file and return as list.

@@ -98,7 +100,8 @@ def load_json_array(


def load_json_object(
filename: str | PathLike, default: JsonObjectType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonObjectType = _SENTINEL, # type: ignore[assignment]
) -> JsonObjectType:
"""Load JSON data from a file and return as dict.

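These signatures all use a module-level `_SENTINEL` as the default so that callers can pass any value, including None, and still be distinguished from "no default given". A simplified sketch of the idea under that assumption (the real helpers also handle decode errors and other default types):

import json
from pathlib import Path
from typing import Any

_SENTINEL: Any = object()


def load_json_object(filename: str, default: Any = _SENTINEL) -> Any:
    """Simplified sketch; not the real homeassistant.util.json helper."""
    try:
        return json.loads(Path(filename).read_text())
    except FileNotFoundError:
        # The sentinel lets callers pass default=None and have it honored.
        return {} if default is _SENTINEL else default


print(load_json_object("missing.json"))  # {}
print(load_json_object("missing.json", default={"a": 1}))  # {'a': 1}
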
@@ -129,6 +129,7 @@ def vincenty(
uSq = cosSqAlpha * (AXIS_A**2 - AXIS_B**2) / (AXIS_B**2)
A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
# fmt: off
deltaSigma = (
B
* sinSigma
@@ -146,6 +147,7 @@ def vincenty(
)
)
)
# fmt: on
s = AXIS_B * A * (sigma - deltaSigma)

s /= 1000 # Conversion of meters to kilometers

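This hunk (like the stream recorder one earlier) wraps a hand-aligned expression in formatter guards; ruff format, like black, leaves everything between the markers untouched. A tiny illustration of the guard mechanism:

# fmt: off
matrix = [
    1, 0,
    0, 1,
]
# fmt: on
print(matrix)
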
@@ -340,7 +340,12 @@ def _handle_mapping_tag(
raise yaml.MarkedYAMLError(
context=f'invalid key: "{key}"',
context_mark=yaml.Mark(
fname, 0, line, -1, None, None # type: ignore[arg-type]
fname,
0,
line,
-1,
None,
None, # type: ignore[arg-type]
),
) from exc

@@ -26,7 +26,7 @@ dependencies = [
"aiohttp==3.9.0;python_version>='3.12'",
"aiohttp==3.8.5;python_version<'3.12'",
"aiohttp_cors==0.7.0",
"aiohttp-fast-url-dispatcher==0.1.0",
"aiohttp-fast-url-dispatcher==0.3.0",
"aiohttp-zlib-ng==0.1.1",
"astral==2.2",
"attrs==23.1.0",
@@ -79,9 +79,6 @@ include-package-data = true
[tool.setuptools.packages.find]
include = ["homeassistant*"]

[tool.black]
extend-exclude = "/generated/"

[tool.pylint.MAIN]
py-version = "3.11"
ignore = [
@@ -128,7 +125,7 @@ class-const-naming-style = "any"

[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by black
# format - handled by ruff
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load

@@ -6,7 +6,7 @@
aiohttp==3.9.0;python_version>='3.12'
aiohttp==3.8.5;python_version<'3.12'
aiohttp_cors==0.7.0
aiohttp-fast-url-dispatcher==0.1.0
aiohttp-fast-url-dispatcher==0.3.0
aiohttp-zlib-ng==0.1.1
astral==2.2
attrs==23.1.0

@@ -257,7 +257,7 @@ aioharmony==0.2.10
aiohomekit==3.0.9

# homeassistant.components.http
aiohttp-fast-url-dispatcher==0.1.0
aiohttp-fast-url-dispatcher==0.3.0

# homeassistant.components.http
aiohttp-zlib-ng==0.1.1
@@ -890,7 +890,7 @@ georss-qld-bushfire-alert-client==0.5
getmac==0.8.2

# homeassistant.components.gios
gios==3.2.1
gios==3.2.2

# homeassistant.components.gitter
gitterpy==0.1.7
@@ -2354,7 +2354,7 @@ rfk101py==0.0.1
rflink==0.0.65

# homeassistant.components.ring
ring-doorbell[listen]==0.8.2
ring-doorbell[listen]==0.8.3

# homeassistant.components.fleetgo
ritassist==0.9.2
@@ -2436,7 +2436,7 @@ sensorpush-ble==1.5.5
sentry-sdk==1.37.1

# homeassistant.components.sfr_box
sfrbox-api==0.0.6
sfrbox-api==0.0.8

# homeassistant.components.sharkiq
sharkiq==1.0.2

@@ -233,7 +233,7 @@ aioharmony==0.2.10
aiohomekit==3.0.9

# homeassistant.components.http
aiohttp-fast-url-dispatcher==0.1.0
aiohttp-fast-url-dispatcher==0.3.0

# homeassistant.components.http
aiohttp-zlib-ng==0.1.1
@@ -710,7 +710,7 @@ georss-qld-bushfire-alert-client==0.5
getmac==0.8.2

# homeassistant.components.gios
gios==3.2.1
gios==3.2.2

# homeassistant.components.glances
glances-api==0.4.3
@@ -1760,7 +1760,7 @@ reolink-aio==0.8.1
rflink==0.0.65

# homeassistant.components.ring
ring-doorbell[listen]==0.8.2
ring-doorbell[listen]==0.8.3

# homeassistant.components.roku
rokuecp==0.18.1
@@ -1815,7 +1815,7 @@ sensorpush-ble==1.5.5
sentry-sdk==1.37.1

# homeassistant.components.sfr_box
sfrbox-api==0.0.6
sfrbox-api==0.0.8

# homeassistant.components.sharkiq
sharkiq==1.0.2

@@ -1,6 +1,5 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

black==23.11.0
codespell==2.2.2
ruff==0.1.1
ruff==0.1.6
yamllint==1.32.0

@@ -1,10 +1,10 @@
#!/bin/sh
# Format code with black.
# Format code with ruff-format.

cd "$(dirname "$0")/.."

black \
ruff \
format \
--check \
--fast \
--quiet \
homeassistant tests script *.py

Some files were not shown because too many files have changed in this diff.