Compare commits

..

2 Commits

Author SHA1 Message Date
Jan Čermák
8df4152d4e Disable unnecessary parts to test image build 2026-02-24 17:18:39 +01:00
Jan Čermák
e6ed0b5d14 Use native ARM runner for builder action, update to builder 2026.02.1
Since 2026.02.0 the builder has sha-pinning fixed, so we can also get rid of
the Zizmor error suppression.

Builder changes:
* https://github.com/home-assistant/builder/releases/tag/2026.02.0
* https://github.com/home-assistant/builder/releases/tag/2026.02.1
2026-02-24 16:15:30 +01:00
244 changed files with 2249 additions and 17147 deletions

View File

@@ -57,10 +57,10 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}
- name: Verify version
uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
with:
ignore-dev: true
# - name: Verify version
# uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
# with:
# ignore-dev: true
- name: Fail if translations files are checked in
run: |
@@ -341,282 +341,283 @@ jobs:
image: ${{ matrix.arch }}
args: |
$BUILD_ARGS \
--test \
--target /data/machine \
--cosign \
--machine "${{ needs.init.outputs.version }}=${{ matrix.machine }}"
publish_ha:
name: Publish version files
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_machine"]
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
with:
name: ${{ secrets.GIT_NAME }}
email: ${{ secrets.GIT_EMAIL }}
token: ${{ secrets.GIT_TOKEN }}
- name: Update version file
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
channel: ${{ needs.init.outputs.channel }}
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
- name: Update version file (stable -> beta)
if: needs.init.outputs.channel == 'stable'
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
channel: beta
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
publish_container:
name: Publish meta container for ${{ matrix.registry }}
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Verify architecture image signatures
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
done
echo "✓ All images verified successfully"
# Generate all Docker tags based on version string
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# Examples:
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
tags: |
type=raw,value=${{ needs.init.outputs.version }},priority=9999
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.7.1
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
sleep 10
if [ "${attempt}" -eq 3 ]; then
echo "Failed after 3 attempts"
exit 1
fi
done
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
done
- name: Create and push multi-arch manifests
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
REGISTRY: ${{ matrix.registry }}
VERSION: ${{ needs.init.outputs.version }}
META_TAGS: ${{ steps.meta.outputs.tags }}
run: |
# Build list of architecture images dynamically
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
done
# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${META_TAGS}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
# Create manifest with ALL tags in a single operation (much faster!)
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
# Sign each tag separately (signing requires individual tag names)
echo "Signing all tags..."
for tag in "${TAGS[@]}"; do
echo "Signing ${tag}"
cosign sign --yes "${tag}"
done
echo "All manifests created and signed successfully"
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
id-token: write # For PyPI trusted publishing
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: translations
- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install build
python -m build
- name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
skip-existing: true
hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
packages: write # To push to GHCR
attestations: write # For build provenance attestation
id-token: write # For build provenance attestation
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
# publish_ha:
# name: Publish version files
# environment: ${{ needs.init.outputs.channel }}
# if: github.repository_owner == 'home-assistant'
# needs: ["init", "build_machine"]
# runs-on: ubuntu-latest
# permissions:
# contents: read
# steps:
# - name: Checkout the repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Initialize git
# uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
# with:
# name: ${{ secrets.GIT_NAME }}
# email: ${{ secrets.GIT_EMAIL }}
# token: ${{ secrets.GIT_TOKEN }}
#
# - name: Update version file
# uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
# with:
# key: "homeassistant[]"
# key-description: "Home Assistant Core"
# version: ${{ needs.init.outputs.version }}
# channel: ${{ needs.init.outputs.channel }}
# exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
#
# - name: Update version file (stable -> beta)
# if: needs.init.outputs.channel == 'stable'
# uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
# with:
# key: "homeassistant[]"
# key-description: "Home Assistant Core"
# version: ${{ needs.init.outputs.version }}
# channel: beta
# exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
#
# publish_container:
# name: Publish meta container for ${{ matrix.registry }}
# environment: ${{ needs.init.outputs.channel }}
# if: github.repository_owner == 'home-assistant'
# needs: ["init", "build_base"]
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# packages: write # To push to GHCR
# id-token: write # For cosign signing
# strategy:
# fail-fast: false
# matrix:
# registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
# steps:
# - name: Install Cosign
# uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
# with:
# cosign-release: "v2.5.3"
#
# - name: Login to DockerHub
# if: matrix.registry == 'docker.io/homeassistant'
# uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
#
# - name: Login to GitHub Container Registry
# uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
#
# - name: Verify architecture image signatures
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# VERSION: ${{ needs.init.outputs.version }}
# run: |
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# for arch in $ARCHS; do
# echo "Verifying ${arch} image signature..."
# cosign verify \
# --certificate-oidc-issuer https://token.actions.githubusercontent.com \
# --certificate-identity-regexp https://github.com/home-assistant/core/.* \
# "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
# done
# echo "✓ All images verified successfully"
#
# # Generate all Docker tags based on version string
# # Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# # Examples:
# # 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# # 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# # 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
# - name: Generate Docker metadata
# id: meta
# uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
# with:
# images: ${{ matrix.registry }}/home-assistant
# sep-tags: ","
# tags: |
# type=raw,value=${{ needs.init.outputs.version }},priority=9999
# type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
# type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
# type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
# type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
# type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
# type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
#
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.7.1
#
# - name: Copy architecture images to DockerHub
# if: matrix.registry == 'docker.io/homeassistant'
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# VERSION: ${{ needs.init.outputs.version }}
# run: |
# # Use imagetools to copy image blobs directly between registries
# # This preserves provenance/attestations and seems to be much faster than pull/push
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# for arch in $ARCHS; do
# echo "Copying ${arch} image to DockerHub..."
# for attempt in 1 2 3; do
# if docker buildx imagetools create \
# --tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
# "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
# break
# fi
# echo "Attempt ${attempt} failed, retrying in 10 seconds..."
# sleep 10
# if [ "${attempt}" -eq 3 ]; then
# echo "Failed after 3 attempts"
# exit 1
# fi
# done
# cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
# done
#
# - name: Create and push multi-arch manifests
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# REGISTRY: ${{ matrix.registry }}
# VERSION: ${{ needs.init.outputs.version }}
# META_TAGS: ${{ steps.meta.outputs.tags }}
# run: |
# # Build list of architecture images dynamically
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# ARCH_IMAGES=()
# for arch in $ARCHS; do
# ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
# done
#
# # Build list of all tags for single manifest creation
# # Note: Using sep-tags=',' in metadata-action for easier parsing
# TAG_ARGS=()
# IFS=',' read -ra TAGS <<< "${META_TAGS}"
# for tag in "${TAGS[@]}"; do
# TAG_ARGS+=("--tag" "${tag}")
# done
#
# # Create manifest with ALL tags in a single operation (much faster!)
# echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
# docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
#
# # Sign each tag separately (signing requires individual tag names)
# echo "Signing all tags..."
# for tag in "${TAGS[@]}"; do
# echo "Signing ${tag}"
# cosign sign --yes "${tag}"
# done
#
# echo "All manifests created and signed successfully"
#
# build_python:
# name: Build PyPi package
# environment: ${{ needs.init.outputs.channel }}
# needs: ["init", "build_base"]
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# id-token: write # For PyPI trusted publishing
# if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
# steps:
# - name: Checkout the repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Set up Python ${{ env.DEFAULT_PYTHON }}
# uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
# with:
# python-version: ${{ env.DEFAULT_PYTHON }}
#
# - name: Download translations
# uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
# with:
# name: translations
#
# - name: Extract translations
# run: |
# tar xvf translations.tar.gz
# rm translations.tar.gz
#
# - name: Build package
# shell: bash
# run: |
# # Remove dist, build, and homeassistant.egg-info
# # when build locally for testing!
# pip install build
# python -m build
#
# - name: Upload package to PyPI
# uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
# with:
# skip-existing: true
#
# hassfest-image:
# name: Build and test hassfest image
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# packages: write # To push to GHCR
# attestations: write # For build provenance attestation
# id-token: write # For build provenance attestation
# needs: ["init"]
# if: github.repository_owner == 'home-assistant'
# env:
# HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
# HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
# steps:
# - name: Checkout repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Login to GitHub Container Registry
# uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
#
# - name: Build Docker image
# uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
# with:
# context: . # So action will not pull the repository again
# file: ./script/hassfest/docker/Dockerfile
# load: true
# tags: ${{ env.HASSFEST_IMAGE_TAG }}
#
# - name: Run hassfest against core
# run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
#
# - name: Push Docker image
# if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
# id: push
# uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
# with:
# context: . # So action will not pull the repository again
# file: ./script/hassfest/docker/Dockerfile
# push: true
# tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
#
# - name: Generate artifact attestation
# if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
# uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0
# with:
# subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
# subject-digest: ${{ steps.push.outputs.digest }}
# push-to-registry: true

1
CODEOWNERS generated
View File

@@ -1966,7 +1966,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/zone/ @home-assistant/core
/tests/components/zone/ @home-assistant/core
/homeassistant/components/zoneminder/ @rohankapoorcom @nabbi
/tests/components/zoneminder/ @rohankapoorcom @nabbi
/homeassistant/components/zwave_js/ @home-assistant/z-wave
/tests/components/zwave_js/ @home-assistant/z-wave
/homeassistant/components/zwave_me/ @lawfulchaos @Z-Wave-Me @PoltoS

View File

@@ -4,16 +4,7 @@ from __future__ import annotations
import logging
from airos.airos6 import AirOS6
from airos.airos8 import AirOS8
from airos.exceptions import (
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDataMissingError,
AirOSDeviceConnectionError,
AirOSKeyDataMissingError,
)
from airos.helpers import DetectDeviceData, async_get_firmware_data
from homeassistant.const import (
CONF_HOST,
@@ -24,11 +15,6 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -53,40 +39,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
)
conn_data = {
CONF_HOST: entry.data[CONF_HOST],
CONF_USERNAME: entry.data[CONF_USERNAME],
CONF_PASSWORD: entry.data[CONF_PASSWORD],
"use_ssl": entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
"session": session,
}
# Determine firmware version before creating the device instance
try:
device_data: DetectDeviceData = await async_get_firmware_data(**conn_data)
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
TimeoutError,
) as err:
raise ConfigEntryNotReady from err
except (
AirOSConnectionAuthenticationError,
AirOSDataMissingError,
) as err:
raise ConfigEntryAuthFailed from err
except AirOSKeyDataMissingError as err:
raise ConfigEntryError("key_data_missing") from err
except Exception as err:
raise ConfigEntryError("unknown") from err
airos_class: type[AirOS8 | AirOS6] = (
AirOS8 if device_data["fw_major"] == 8 else AirOS6
airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
airos_device = airos_class(**conn_data)
coordinator = AirOSDataUpdateCoordinator(hass, entry, device_data, airos_device)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

View File

@@ -4,9 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Generic, TypeVar
from airos.data import AirOSDataBaseClass
import logging
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -20,24 +18,25 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
PARALLEL_UPDATES = 0
_LOGGER = logging.getLogger(__name__)
AirOSDataModel = TypeVar("AirOSDataModel", bound=AirOSDataBaseClass)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirOSBinarySensorEntityDescription(
BinarySensorEntityDescription,
Generic[AirOSDataModel],
):
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describe an AirOS binary sensor."""
value_fn: Callable[[AirOSDataModel], bool]
value_fn: Callable[[AirOS8Data], bool]
AirOS8BinarySensorEntityDescription = AirOSBinarySensorEntityDescription[AirOS8Data]
COMMON_BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
AirOSBinarySensorEntityDescription(
key="portfw",
translation_key="port_forwarding",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.portfw,
),
AirOSBinarySensorEntityDescription(
key="dhcp_client",
translation_key="dhcp_client",
@@ -54,23 +53,6 @@ COMMON_BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="pppoe",
translation_key="pppoe",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.pppoe,
entity_registry_enabled_default=False,
),
)
AIROS8_BINARY_SENSORS: tuple[AirOS8BinarySensorEntityDescription, ...] = (
AirOS8BinarySensorEntityDescription(
key="portfw",
translation_key="port_forwarding",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.portfw,
),
AirOS8BinarySensorEntityDescription(
key="dhcp6_server",
translation_key="dhcp6_server",
device_class=BinarySensorDeviceClass.RUNNING,
@@ -78,6 +60,14 @@ AIROS8_BINARY_SENSORS: tuple[AirOS8BinarySensorEntityDescription, ...] = (
value_fn=lambda data: data.services.dhcp6d_stateful,
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="pppoe",
translation_key="pppoe",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.pppoe,
entity_registry_enabled_default=False,
),
)
@@ -89,20 +79,10 @@ async def async_setup_entry(
"""Set up the AirOS binary sensors from a config entry."""
coordinator = config_entry.runtime_data
entities: list[BinarySensorEntity] = []
entities.extend(
AirOSBinarySensor(coordinator, description)
for description in COMMON_BINARY_SENSORS
async_add_entities(
AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
)
if coordinator.device_data["fw_major"] == 8:
entities.extend(
AirOSBinarySensor(coordinator, description)
for description in AIROS8_BINARY_SENSORS
)
async_add_entities(entities)
class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
"""Representation of a binary sensor."""

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
import logging
from airos.exceptions import AirOSException
from homeassistant.components.button import (
@@ -16,6 +18,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import DOMAIN, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
REBOOT_BUTTON = ButtonEntityDescription(

View File

@@ -7,8 +7,6 @@ from collections.abc import Mapping
import logging
from typing import Any
from airos.airos6 import AirOS6
from airos.airos8 import AirOS8
from airos.discovery import airos_discover_devices
from airos.exceptions import (
AirOSConnectionAuthenticationError,
@@ -19,7 +17,6 @@ from airos.exceptions import (
AirOSKeyDataMissingError,
AirOSListenerError,
)
from airos.helpers import DetectDeviceData, async_get_firmware_data
import voluptuous as vol
from homeassistant.config_entries import (
@@ -56,11 +53,10 @@ from .const import (
MAC_ADDRESS,
SECTION_ADVANCED_SETTINGS,
)
from .coordinator import AirOS8
_LOGGER = logging.getLogger(__name__)
AirOSDeviceDetect = AirOS8 | AirOS6
# Discovery duration in seconds, airOS announces every 20 seconds
DISCOVER_INTERVAL: int = 30
@@ -96,7 +92,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOSDeviceDetect
self.airos_device: AirOS8
self.errors: dict[str, str] = {}
self.discovered_devices: dict[str, dict[str, Any]] = {}
self.discovery_abort_reason: str | None = None
@@ -139,14 +135,16 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
device_data: DetectDeviceData = await async_get_firmware_data(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
@@ -161,14 +159,14 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
else:
await self.async_set_unique_id(device_data["mac"])
await self.async_set_unique_id(airos_data.derived.mac)
if self.source in [SOURCE_REAUTH, SOURCE_RECONFIGURE]:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"title": device_data["hostname"], "data": config_data}
return {"title": airos_data.host.hostname, "data": config_data}
return None

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
import logging
from airos.airos6 import AirOS6, AirOS6Data
from airos.airos8 import AirOS8, AirOS8Data
from airos.exceptions import (
AirOSConnectionAuthenticationError,
@@ -12,7 +11,6 @@ from airos.exceptions import (
AirOSDataMissingError,
AirOSDeviceConnectionError,
)
from airos.helpers import DetectDeviceData
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -23,28 +21,19 @@ from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
AirOSDeviceDetect = AirOS8 | AirOS6
AirOSDataDetect = AirOS8Data | AirOS6Data
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
"""Class to manage fetching AirOS data from single endpoint."""
airos_device: AirOSDeviceDetect
config_entry: AirOSConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: AirOSConfigEntry,
device_data: DetectDeviceData,
airos_device: AirOSDeviceDetect,
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
) -> None:
"""Initialize the coordinator."""
self.airos_device = airos_device
self.device_data = device_data
super().__init__(
hass,
_LOGGER,
@@ -53,7 +42,7 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> AirOSDataDetect:
async def _async_update_data(self) -> AirOS8Data:
"""Fetch data from AirOS."""
try:
await self.airos_device.login()
@@ -73,7 +62,7 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
except AirOSDataMissingError as err:
except (AirOSDataMissingError,) as err:
_LOGGER.error("Expected data not returned by airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,

View File

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"quality_scale": "silver",
"requirements": ["airos==0.6.4"]
}

View File

@@ -42,20 +42,16 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery:
status: exempt
comment: No way to detect device on the network
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: single airOS device per config entry; peer/remote endpoints are not modeled as child devices/entities at this time
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
@@ -65,10 +61,8 @@ rules:
status: exempt
comment: no (custom) icons used or envisioned
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt
comment: single airOS device per config entry; peer/remote endpoints are not modeled as child devices/entities at this time
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done

View File

@@ -5,14 +5,8 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import Generic, TypeVar
from airos.data import (
AirOSDataBaseClass,
DerivedWirelessMode,
DerivedWirelessRole,
NetRole,
)
from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -43,19 +37,15 @@ WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
PARALLEL_UPDATES = 0
AirOSDataModel = TypeVar("AirOSDataModel", bound=AirOSDataBaseClass)
@dataclass(frozen=True, kw_only=True)
class AirOSSensorEntityDescription(SensorEntityDescription, Generic[AirOSDataModel]):
class AirOSSensorEntityDescription(SensorEntityDescription):
"""Describe an AirOS sensor."""
value_fn: Callable[[AirOSDataModel], StateType]
value_fn: Callable[[AirOS8Data], StateType]
AirOS8SensorEntityDescription = AirOSSensorEntityDescription[AirOS8Data]
COMMON_SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
AirOSSensorEntityDescription(
key="host_cpuload",
translation_key="host_cpuload",
@@ -85,6 +75,54 @@ COMMON_SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
translation_key="wireless_essid",
value_fn=lambda data: data.wireless.essid,
),
AirOSSensorEntityDescription(
key="wireless_antenna_gain",
translation_key="wireless_antenna_gain",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.antenna_gain,
),
AirOSSensorEntityDescription(
key="wireless_throughput_tx",
translation_key="wireless_throughput_tx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.tx,
),
AirOSSensorEntityDescription(
key="wireless_throughput_rx",
translation_key="wireless_throughput_rx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.rx,
),
AirOSSensorEntityDescription(
key="wireless_polling_dl_capacity",
translation_key="wireless_polling_dl_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.dl_capacity,
),
AirOSSensorEntityDescription(
key="wireless_polling_ul_capacity",
translation_key="wireless_polling_ul_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
AirOSSensorEntityDescription(
key="host_uptime",
translation_key="host_uptime",
@@ -120,57 +158,6 @@ COMMON_SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
options=WIRELESS_ROLE_OPTIONS,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_antenna_gain",
translation_key="wireless_antenna_gain",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.antenna_gain,
),
AirOSSensorEntityDescription(
key="wireless_polling_dl_capacity",
translation_key="wireless_polling_dl_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.dl_capacity,
),
AirOSSensorEntityDescription(
key="wireless_polling_ul_capacity",
translation_key="wireless_polling_ul_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
)
AIROS8_SENSORS: tuple[AirOS8SensorEntityDescription, ...] = (
AirOS8SensorEntityDescription(
key="wireless_throughput_tx",
translation_key="wireless_throughput_tx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.tx,
),
AirOS8SensorEntityDescription(
key="wireless_throughput_rx",
translation_key="wireless_throughput_rx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.rx,
),
)
@@ -182,14 +169,7 @@ async def async_setup_entry(
"""Set up the AirOS sensors from a config entry."""
coordinator = config_entry.runtime_data
async_add_entities(
AirOSSensor(coordinator, description) for description in COMMON_SENSORS
)
if coordinator.device_data["fw_major"] == 8:
async_add_entities(
AirOSSensor(coordinator, description) for description in AIROS8_SENSORS
)
async_add_entities(AirOSSensor(coordinator, description) for description in SENSORS)
class AirOSSensor(AirOSEntity, SensorEntity):

View File

@@ -8,6 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["anthropic==0.83.0"]
}

View File

@@ -1,4 +1,4 @@
"""The BSB-LAN integration."""
"""The BSB-Lan integration."""
import asyncio
import dataclasses
@@ -36,7 +36,7 @@ from .const import CONF_PASSKEY, DOMAIN
from .coordinator import BSBLanFastCoordinator, BSBLanSlowCoordinator
from .services import async_setup_services
PLATFORMS = [Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -56,13 +56,13 @@ class BSBLanData:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the BSB-LAN integration."""
"""Set up the BSB-Lan integration."""
async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bool:
"""Set up BSB-LAN from a config entry."""
"""Set up BSB-Lan from a config entry."""
# create config using BSBLANConfig
config = BSBLANConfig(

View File

@@ -1,59 +0,0 @@
"""Button platform for BSB-Lan integration."""
from __future__ import annotations
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BSBLanConfigEntry, BSBLanData
from .coordinator import BSBLanFastCoordinator
from .entity import BSBLanEntity
from .helpers import async_sync_device_time
PARALLEL_UPDATES = 1
BUTTON_DESCRIPTIONS: tuple[ButtonEntityDescription, ...] = (
ButtonEntityDescription(
key="sync_time",
translation_key="sync_time",
entity_category=EntityCategory.CONFIG,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: BSBLanConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up BSB-Lan button entities from a config entry."""
data = entry.runtime_data
async_add_entities(
BSBLanButtonEntity(data.fast_coordinator, data, description)
for description in BUTTON_DESCRIPTIONS
)
class BSBLanButtonEntity(BSBLanEntity, ButtonEntity):
"""Defines a BSB-Lan button entity."""
entity_description: ButtonEntityDescription
def __init__(
self,
coordinator: BSBLanFastCoordinator,
data: BSBLanData,
description: ButtonEntityDescription,
) -> None:
"""Initialize BSB-Lan button entity."""
super().__init__(coordinator, data)
self.entity_description = description
self._attr_unique_id = f"{data.device.MAC}-{description.key}"
self._data = data
async def async_press(self) -> None:
"""Handle the button press."""
await async_sync_device_time(self._data.client, self._data.device.name)

View File

@@ -39,15 +39,15 @@ PRESET_MODES = [
PRESET_NONE,
]
# Mapping from Home Assistant HVACMode to BSB-LAN integer values
# BSB-LAN uses: 0=off, 1=auto, 2=eco/reduced, 3=heat/comfort
# Mapping from Home Assistant HVACMode to BSB-Lan integer values
# BSB-Lan uses: 0=off, 1=auto, 2=eco/reduced, 3=heat/comfort
HA_TO_BSBLAN_HVAC_MODE: Final[dict[HVACMode, int]] = {
HVACMode.OFF: 0,
HVACMode.AUTO: 1,
HVACMode.HEAT: 3,
}
# Mapping from BSB-LAN integer values to Home Assistant HVACMode
# Mapping from BSB-Lan integer values to Home Assistant HVACMode
BSBLAN_TO_HA_HVAC_MODE: Final[dict[int, HVACMode]] = {
0: HVACMode.OFF,
1: HVACMode.AUTO,
@@ -69,6 +69,7 @@ async def async_setup_entry(
class BSBLANClimate(BSBLanEntity, ClimateEntity):
"""Defines a BSBLAN climate device."""
_attr_has_entity_name = True
_attr_name = None
# Determine preset modes
_attr_supported_features = (
@@ -137,7 +138,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
@property
def preset_mode(self) -> str | None:
"""Return the current preset mode."""
# BSB-LAN mode 2 is eco/reduced mode
# BSB-Lan mode 2 is eco/reduced mode
if self._hvac_mode_value == 2:
return PRESET_ECO
return PRESET_NONE
@@ -162,7 +163,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
if ATTR_HVAC_MODE in kwargs:
data[ATTR_HVAC_MODE] = HA_TO_BSBLAN_HVAC_MODE[kwargs[ATTR_HVAC_MODE]]
if ATTR_PRESET_MODE in kwargs:
# eco preset uses BSB-LAN mode 2, none preset uses mode 1 (auto)
# eco preset uses BSB-Lan mode 2, none preset uses mode 1 (auto)
if kwargs[ATTR_PRESET_MODE] == PRESET_ECO:
data[ATTR_HVAC_MODE] = 2
elif kwargs[ATTR_PRESET_MODE] == PRESET_NONE:

View File

@@ -1,4 +1,4 @@
"""Config flow for BSB-LAN integration."""
"""Config flow for BSB-Lan integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""Constants for the BSB-LAN integration."""
"""Constants for the BSB-Lan integration."""
from __future__ import annotations

View File

@@ -1,4 +1,4 @@
"""DataUpdateCoordinator for the BSB-LAN integration."""
"""DataUpdateCoordinator for the BSB-Lan integration."""
from __future__ import annotations
@@ -62,7 +62,7 @@ class BSBLanSlowData:
class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
"""Base BSB-LAN coordinator."""
"""Base BSB-Lan coordinator."""
config_entry: BSBLanConfigEntry
@@ -74,7 +74,7 @@ class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
name: str,
update_interval: timedelta,
) -> None:
"""Initialize the BSB-LAN coordinator."""
"""Initialize the BSB-Lan coordinator."""
super().__init__(
hass,
logger=LOGGER,
@@ -86,7 +86,7 @@ class BSBLanCoordinator[T](DataUpdateCoordinator[T]):
class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
"""The BSB-LAN fast update coordinator for frequently changing data."""
"""The BSB-Lan fast update coordinator for frequently changing data."""
def __init__(
self,
@@ -94,7 +94,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
config_entry: BSBLanConfigEntry,
client: BSBLAN,
) -> None:
"""Initialize the BSB-LAN fast coordinator."""
"""Initialize the BSB-Lan fast coordinator."""
super().__init__(
hass,
config_entry,
@@ -104,7 +104,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
)
async def _async_update_data(self) -> BSBLanFastData:
"""Fetch fast-changing data from the BSB-LAN device."""
"""Fetch fast-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Use include filtering to only fetch parameters we actually use
@@ -115,15 +115,12 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="coordinator_auth_error",
"Authentication failed for BSB-Lan device"
) from err
except BSBLANConnectionError as err:
host = self.config_entry.data[CONF_HOST]
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="coordinator_connection_error",
translation_placeholders={"host": host},
f"Error while establishing connection with BSB-Lan device at {host}"
) from err
return BSBLanFastData(
@@ -134,7 +131,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
"""The BSB-LAN slow update coordinator for infrequently changing data."""
"""The BSB-Lan slow update coordinator for infrequently changing data."""
def __init__(
self,
@@ -142,7 +139,7 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
config_entry: BSBLanConfigEntry,
client: BSBLAN,
) -> None:
"""Initialize the BSB-LAN slow coordinator."""
"""Initialize the BSB-Lan slow coordinator."""
super().__init__(
hass,
config_entry,
@@ -152,7 +149,7 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
)
async def _async_update_data(self) -> BSBLanSlowData:
"""Fetch slow-changing data from the BSB-LAN device."""
"""Fetch slow-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Use include filtering to only fetch parameters we actually use

View File

@@ -32,15 +32,6 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
model=(
data.info.device_identification.value
if data.info.device_identification
and data.info.device_identification.value
else None
),
model_id=(
f"{data.info.controller_family.value}_{data.info.controller_variant.value}"
if data.info.controller_family
and data.info.controller_variant
and data.info.controller_family.value
and data.info.controller_variant.value
else None
),
sw_version=data.device.version,

View File

@@ -1,42 +0,0 @@
"""Helper functions for BSB-Lan integration."""
from __future__ import annotations
from bsblan import BSBLAN, BSBLANError
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from .const import DOMAIN
async def async_sync_device_time(client: BSBLAN, device_name: str) -> None:
"""Synchronize BSB-LAN device time with Home Assistant.
Only updates if device time differs from Home Assistant time.
Args:
client: The BSB-LAN client instance.
device_name: The name of the device (used in error messages).
Raises:
HomeAssistantError: If the time sync operation fails.
"""
try:
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_name,
"error": str(err),
},
) from err

View File

@@ -1,11 +1,4 @@
{
"entity": {
"button": {
"sync_time": {
"default": "mdi:timer-sync-outline"
}
}
},
"services": {
"set_hot_water_schedule": {
"service": "mdi:calendar-clock"

View File

@@ -1,13 +1,12 @@
{
"domain": "bsblan",
"name": "BSB-LAN",
"name": "BSB-Lan",
"codeowners": ["@liudger"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bsblan",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"quality_scale": "silver",
"requirements": ["python-bsblan==5.0.1"],
"zeroconf": [
{

View File

@@ -1,74 +0,0 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration does not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
This integration has a fixed single device.
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
This integration provides a limited number of entities, all of which are useful to users.
entity-translations: done
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration doesn't have any cases where raising an issue is needed.
stale-devices:
status: exempt
comment: |
This integration has a fixed single device.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -1,4 +1,4 @@
"""Support for BSB-LAN sensors."""
"""Support for BSB-Lan sensors."""
from __future__ import annotations
@@ -25,7 +25,7 @@ PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class BSBLanSensorEntityDescription(SensorEntityDescription):
"""Describes BSB-LAN sensor entity."""
"""Describes BSB-Lan sensor entity."""
value_fn: Callable[[BSBLanFastData], StateType]
exists_fn: Callable[[BSBLanFastData], bool] = lambda data: True
@@ -79,7 +79,7 @@ async def async_setup_entry(
entry: BSBLanConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up BSB-LAN sensor based on a config entry."""
"""Set up BSB-Lan sensor based on a config entry."""
data = entry.runtime_data
# Only create sensors for available data points
@@ -94,7 +94,7 @@ async def async_setup_entry(
class BSBLanSensor(BSBLanEntity, SensorEntity):
"""Defines a BSB-LAN sensor."""
"""Defines a BSB-Lan sensor."""
entity_description: BSBLanSensorEntityDescription
@@ -103,7 +103,7 @@ class BSBLanSensor(BSBLanEntity, SensorEntity):
data: BSBLanData,
description: BSBLanSensorEntityDescription,
) -> None:
"""Initialize BSB-LAN sensor."""
"""Initialize BSB-Lan sensor."""
super().__init__(data.fast_coordinator, data)
self.entity_description = description
self._attr_unique_id = f"{data.device.MAC}-{description.key}"

View File

@@ -1,4 +1,4 @@
"""Support for BSB-LAN services."""
"""Support for BSB-Lan services."""
from __future__ import annotations
@@ -13,9 +13,9 @@ from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.util import dt as dt_util
from .const import DOMAIN
from .helpers import async_sync_device_time
if TYPE_CHECKING:
from . import BSBLanConfigEntry
@@ -192,7 +192,7 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
)
try:
# Call the BSB-LAN API to set the schedule
# Call the BSB-Lan API to set the schedule
await client.set_hot_water_schedule(dhw_schedule)
except BSBLANError as err:
raise HomeAssistantError(
@@ -245,7 +245,25 @@ async def async_sync_time(service_call: ServiceCall) -> None:
)
client = entry.runtime_data.client
await async_sync_device_time(client, device_entry.name or device_id)
try:
# Get current device time
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_entry.name or device_id,
"error": str(err),
},
) from err
SYNC_TIME_SCHEMA = vol.Schema(
@@ -257,7 +275,7 @@ SYNC_TIME_SCHEMA = vol.Schema(
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the BSB-LAN services."""
"""Register the BSB-Lan services."""
hass.services.async_register(
DOMAIN,
SERVICE_SET_HOT_WATER_SCHEDULE,

View File

@@ -22,8 +22,8 @@
"password": "[%key:component::bsblan::config::step::user::data_description::password%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]"
},
"description": "A BSB-LAN device was discovered at {host}. Please provide credentials if required.",
"title": "BSB-LAN device discovered"
"description": "A BSB-Lan device was discovered at {host}. Please provide credentials if required.",
"title": "BSB-Lan device discovered"
},
"reauth_confirm": {
"data": {
@@ -36,7 +36,7 @@
"password": "[%key:component::bsblan::config::step::user::data_description::password%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]"
},
"description": "The BSB-LAN integration needs to re-authenticate with {name}",
"description": "The BSB-Lan integration needs to re-authenticate with {name}",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
@@ -48,23 +48,18 @@
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"host": "The hostname or IP address of your BSB-LAN device.",
"passkey": "The passkey for your BSB-LAN device.",
"password": "The password for your BSB-LAN device.",
"port": "The port number of your BSB-LAN device.",
"username": "The username for your BSB-LAN device."
"host": "The hostname or IP address of your BSB-Lan device.",
"passkey": "The passkey for your BSB-Lan device.",
"password": "The password for your BSB-Lan device.",
"port": "The port number of your BSB-Lan device.",
"username": "The username for your BSB-Lan device."
},
"description": "Set up your BSB-LAN device to integrate with Home Assistant.",
"title": "Connect to the BSB-LAN device"
"description": "Set up your BSB-Lan device to integrate with Home Assistant.",
"title": "Connect to the BSB-Lan device"
}
}
},
"entity": {
"button": {
"sync_time": {
"name": "Sync time"
}
},
"sensor": {
"current_temperature": {
"name": "Current temperature"
@@ -81,12 +76,6 @@
"config_entry_not_loaded": {
"message": "The device `{device_name}` is not currently loaded or available"
},
"coordinator_auth_error": {
"message": "Authentication failed for BSB-LAN device"
},
"coordinator_connection_error": {
"message": "Error while establishing connection with BSB-LAN device at {host}"
},
"end_time_before_start_time": {
"message": "End time ({end_time}) must be after start time ({start_time})"
},
@@ -97,11 +86,14 @@
"message": "No configuration entry found for device: {device_id}"
},
"set_data_error": {
"message": "An error occurred while sending the data to the BSB-LAN device"
"message": "An error occurred while sending the data to the BSB-Lan device"
},
"set_operation_mode_error": {
"message": "An error occurred while setting the operation mode"
},
"set_preset_mode_error": {
"message": "Can't set preset mode to {preset_mode} when HVAC mode is not set to auto"
},
"set_schedule_failed": {
"message": "Failed to set hot water schedule: {error}"
},
@@ -112,7 +104,7 @@
"message": "Authentication failed while retrieving static device data"
},
"setup_connection_error": {
"message": "Failed to retrieve static device data from BSB-LAN device at {host}"
"message": "Failed to retrieve static device data from BSB-Lan device at {host}"
},
"setup_general_error": {
"message": "An unknown error occurred while retrieving static device data"
@@ -161,7 +153,7 @@
"name": "Set hot water schedule"
},
"sync_time": {
"description": "Synchronize Home Assistant time to the BSB-LAN device. Only updates if device time differs from Home Assistant time.",
"description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
"fields": {
"device_id": {
"description": "The BSB-LAN device to sync time for.",

View File

@@ -63,7 +63,6 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
"""Defines a BSBLAN water heater entity."""
_attr_name = None
_attr_operation_list = list(HA_TO_BSBLAN_OPERATION_MODE.keys())
_attr_supported_features = (
WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.OPERATION_MODE
@@ -74,6 +73,7 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
"""Initialize BSBLAN water heater."""
super().__init__(data.fast_coordinator, data.slow_coordinator, data)
self._attr_unique_id = format_mac(data.device.MAC)
self._attr_operation_list = list(HA_TO_BSBLAN_OPERATION_MODE.keys())
# Set temperature unit
self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

View File

@@ -10,11 +10,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.WATER_HEATER,
]

View File

@@ -1,189 +0,0 @@
"""Binary sensor platform for Compit integration."""
from dataclasses import dataclass
from compit_inext_api.consts import CompitParameter
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PARALLEL_UPDATES = 0
NO_SENSOR = "no_sensor"
ON_STATES = ["on", "yes", "charging", "alert", "exceeded"]
DESCRIPTIONS: dict[CompitParameter, BinarySensorEntityDescription] = {
CompitParameter.AIRING: BinarySensorEntityDescription(
key=CompitParameter.AIRING.value,
translation_key="airing",
device_class=BinarySensorDeviceClass.WINDOW,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.BATTERY_CHARGE_STATUS: BinarySensorEntityDescription(
key=CompitParameter.BATTERY_CHARGE_STATUS.value,
device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.CO2_ALERT: BinarySensorEntityDescription(
key=CompitParameter.CO2_ALERT.value,
translation_key="co2_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.CO2_LEVEL: BinarySensorEntityDescription(
key=CompitParameter.CO2_LEVEL.value,
translation_key="co2_level",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.DUST_ALERT: BinarySensorEntityDescription(
key=CompitParameter.DUST_ALERT.value,
translation_key="dust_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.PUMP_STATUS: BinarySensorEntityDescription(
key=CompitParameter.PUMP_STATUS.value,
translation_key="pump_status",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
CompitParameter.TEMPERATURE_ALERT: BinarySensorEntityDescription(
key=CompitParameter.TEMPERATURE_ALERT.value,
translation_key="temperature_alert",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
}
@dataclass(frozen=True, kw_only=True)
class CompitDeviceDescription:
"""Class to describe a Compit device."""
name: str
parameters: dict[CompitParameter, BinarySensorEntityDescription]
DEVICE_DEFINITIONS: dict[int, CompitDeviceDescription] = {
12: CompitDeviceDescription(
name="Nano Color",
parameters={
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
78: CompitDeviceDescription(
name="SPM - Nano Color 2",
parameters={
CompitParameter.DUST_ALERT: DESCRIPTIONS[CompitParameter.DUST_ALERT],
CompitParameter.TEMPERATURE_ALERT: DESCRIPTIONS[
CompitParameter.TEMPERATURE_ALERT
],
CompitParameter.CO2_ALERT: DESCRIPTIONS[CompitParameter.CO2_ALERT],
},
),
223: CompitDeviceDescription(
name="Nano Color 2",
parameters={
CompitParameter.AIRING: DESCRIPTIONS[CompitParameter.AIRING],
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
225: CompitDeviceDescription(
name="SPM - Nano Color",
parameters={
CompitParameter.CO2_LEVEL: DESCRIPTIONS[CompitParameter.CO2_LEVEL],
},
),
226: CompitDeviceDescription(
name="AF-1",
parameters={
CompitParameter.BATTERY_CHARGE_STATUS: DESCRIPTIONS[
CompitParameter.BATTERY_CHARGE_STATUS
],
CompitParameter.PUMP_STATUS: DESCRIPTIONS[CompitParameter.PUMP_STATUS],
},
),
}
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Compit binary sensor entities from a config entry."""
coordinator = entry.runtime_data
async_add_devices(
CompitBinarySensor(
coordinator,
device_id,
device_definition.name,
code,
entity_description,
)
for device_id, device in coordinator.connector.all_devices.items()
if (device_definition := DEVICE_DEFINITIONS.get(device.definition.code))
for code, entity_description in device_definition.parameters.items()
if coordinator.connector.get_current_value(device_id, code) != NO_SENSOR
)
class CompitBinarySensor(
    CoordinatorEntity[CompitDataUpdateCoordinator], BinarySensorEntity
):
    """Binary sensor for a single parameter of a Compit device."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: CompitDataUpdateCoordinator,
        device_id: int,
        device_name: str,
        parameter_code: CompitParameter,
        entity_description: BinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor entity."""
        super().__init__(coordinator)
        self.device_id = device_id
        self.parameter_code = parameter_code
        self.entity_description = entity_description
        # Unique ID combines the device ID with the description key so the
        # same parameter on different devices yields distinct entities.
        self._attr_unique_id = f"{device_id}_{entity_description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(device_id))},
            manufacturer=MANUFACTURER_NAME,
            model=device_name,
            name=device_name,
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        if not super().available:
            return False
        # Also require the device to still be known to the connector.
        return self.coordinator.connector.get_device(self.device_id) is not None

    @property
    def is_on(self) -> bool | None:
        """Return the state of the binary sensor."""
        current = self.coordinator.connector.get_current_value(
            self.device_id, self.parameter_code
        )
        return None if current is None else current in ON_STATES

View File

@@ -1,25 +1,5 @@
{
"entity": {
"binary_sensor": {
"airing": {
"default": "mdi:window-open-variant"
},
"co2_alert": {
"default": "mdi:alert"
},
"co2_level": {
"default": "mdi:molecule-co2"
},
"dust_alert": {
"default": "mdi:alert"
},
"pump_status": {
"default": "mdi:pump"
},
"temperature_alert": {
"default": "mdi:alert"
}
},
"number": {
"boiler_target_temperature": {
"default": "mdi:water-boiler"
@@ -158,119 +138,6 @@
"winter": "mdi:snowflake"
}
}
},
"sensor": {
"alarm_code": {
"default": "mdi:alert-circle",
"state": {
"no_alarm": "mdi:check-circle"
}
},
"battery_level": {
"default": "mdi:battery"
},
"boiler_temperature": {
"default": "mdi:thermometer"
},
"calculated_heating_temperature": {
"default": "mdi:thermometer"
},
"calculated_target_temperature": {
"default": "mdi:thermometer"
},
"charging_power": {
"default": "mdi:flash"
},
"circuit_target_temperature": {
"default": "mdi:thermometer"
},
"co2_percent": {
"default": "mdi:molecule-co2"
},
"collector_power": {
"default": "mdi:solar-power"
},
"collector_temperature": {
"default": "mdi:thermometer"
},
"dhw_measured_temperature": {
"default": "mdi:thermometer"
},
"energy_consumption": {
"default": "mdi:lightning-bolt"
},
"energy_smart_grid_yesterday": {
"default": "mdi:lightning-bolt"
},
"energy_today": {
"default": "mdi:lightning-bolt"
},
"energy_total": {
"default": "mdi:lightning-bolt"
},
"energy_yesterday": {
"default": "mdi:lightning-bolt"
},
"fuel_level": {
"default": "mdi:gauge"
},
"humidity": {
"default": "mdi:water-percent"
},
"mixer_temperature": {
"default": "mdi:thermometer"
},
"outdoor_temperature": {
"default": "mdi:thermometer"
},
"pk1_function": {
"default": "mdi:cog",
"state": {
"cooling": "mdi:snowflake-thermometer",
"off": "mdi:cog-off",
"summer": "mdi:weather-sunny",
"winter": "mdi:snowflake"
}
},
"pm10_level": {
"default": "mdi:air-filter",
"state": {
"exceeded": "mdi:alert",
"no_sensor": "mdi:cancel",
"normal": "mdi:air-filter",
"warning": "mdi:alert-circle-outline"
}
},
"pm25_level": {
"default": "mdi:air-filter",
"state": {
"exceeded": "mdi:alert",
"no_sensor": "mdi:cancel",
"normal": "mdi:air-filter",
"warning": "mdi:alert-circle-outline"
}
},
"return_circuit_temperature": {
"default": "mdi:thermometer"
},
"tank_temperature_t2": {
"default": "mdi:thermometer"
},
"tank_temperature_t3": {
"default": "mdi:thermometer"
},
"tank_temperature_t4": {
"default": "mdi:thermometer"
},
"target_heating_temperature": {
"default": "mdi:thermometer"
},
"ventilation_alarm": {
"default": "mdi:alert",
"state": {
"no_alarm": "mdi:check-circle"
}
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -33,26 +33,6 @@
}
},
"entity": {
"binary_sensor": {
"airing": {
"name": "Airing"
},
"co2_alert": {
"name": "CO2 alert"
},
"co2_level": {
"name": "CO2 level"
},
"dust_alert": {
"name": "Dust alert"
},
"pump_status": {
"name": "Pump status"
},
"temperature_alert": {
"name": "Temperature alert"
}
},
"number": {
"boiler_target_temperature": {
"name": "Boiler target temperature"
@@ -203,219 +183,6 @@
"winter": "Winter"
}
}
},
"sensor": {
"actual_buffer_temp": {
"name": "Actual buffer temperature"
},
"actual_dhw_temp": {
"name": "Actual DHW temperature"
},
"actual_hc_temperature_zone": {
"name": "Actual heating circuit {zone} temperature"
},
"actual_upper_source_temp": {
"name": "Actual upper source temperature"
},
"alarm_code": {
"name": "Alarm code",
"state": {
"battery_fault": "Battery fault",
"damaged_outdoor_temp": "Damaged outdoor temperature sensor",
"damaged_return_temp": "Damaged return temperature sensor",
"discharged_battery": "Discharged battery",
"internal_af": "Internal fault",
"low_battery_level": "Low battery level",
"no_alarm": "No alarm",
"no_battery": "No battery",
"no_power": "No power",
"no_pump": "No pump",
"pump_fault": "Pump fault"
}
},
"battery_level": {
"name": "Battery level"
},
"boiler_temperature": {
"name": "Boiler temperature"
},
"buffer_return_temperature": {
"name": "Buffer return temperature"
},
"buffer_set_temperature": {
"name": "Buffer set temperature"
},
"calculated_buffer_temp": {
"name": "Calculated buffer temperature"
},
"calculated_dhw_temp": {
"name": "Calculated DHW temperature"
},
"calculated_heating_temperature": {
"name": "Calculated heating temperature"
},
"calculated_target_temperature": {
"name": "Calculated target temperature"
},
"calculated_upper_source_temp": {
"name": "Calculated upper source temperature"
},
"charging_power": {
"name": "Charging power"
},
"circuit_target_temperature": {
"name": "Circuit target temperature"
},
"co2_percent": {
"name": "CO2 percent"
},
"collector_power": {
"name": "Collector power"
},
"collector_temperature": {
"name": "Collector temperature"
},
"dhw_measured_temperature": {
"name": "DHW measured temperature"
},
"dhw_temperature": {
"name": "DHW temperature"
},
"energy_consumption": {
"name": "Energy consumption"
},
"energy_smart_grid_yesterday": {
"name": "Energy smart grid yesterday"
},
"energy_today": {
"name": "Energy today"
},
"energy_total": {
"name": "Energy total"
},
"energy_yesterday": {
"name": "Energy yesterday"
},
"fuel_level": {
"name": "Fuel level"
},
"heating_target_temperature_zone": {
"name": "Heating circuit {zone} target temperature"
},
"lower_source_temperature": {
"name": "Lower source temperature"
},
"mixer_temperature": {
"name": "Mixer temperature"
},
"mixer_temperature_zone": {
"name": "Mixer {zone} temperature"
},
"outdoor_temperature": {
"name": "Outdoor temperature"
},
"pk1_function": {
"name": "PK1 function",
"state": {
"cooling": "Cooling",
"holiday": "Holiday",
"nano_nr_1": "Nano 1",
"nano_nr_2": "Nano 2",
"nano_nr_3": "Nano 3",
"nano_nr_4": "Nano 4",
"nano_nr_5": "Nano 5",
"off": "Off",
"on": "On",
"summer": "Summer",
"winter": "Winter"
}
},
"pm10_level": {
"name": "PM10 level",
"state": {
"exceeded": "Exceeded",
"no_sensor": "No sensor",
"normal": "Normal",
"warning": "Warning"
}
},
"pm1_level": {
"name": "PM1 level"
},
"pm25_level": {
"name": "PM2.5 level",
"state": {
"exceeded": "Exceeded",
"no_sensor": "No sensor",
"normal": "Normal",
"warning": "Warning"
}
},
"pm4_level": {
"name": "PM4 level"
},
"preset_mode": {
"name": "Preset mode"
},
"protection_temperature": {
"name": "Protection temperature"
},
"pump_status": {
"name": "Pump status",
"state": {
"off": "Off",
"on": "On"
}
},
"return_circuit_temperature": {
"name": "Return circuit temperature"
},
"set_target_temperature": {
"name": "Set target temperature"
},
"tank_temperature_t2": {
"name": "Tank T2 bottom temperature"
},
"tank_temperature_t3": {
"name": "Tank T3 top temperature"
},
"tank_temperature_t4": {
"name": "Tank T4 temperature"
},
"target_heating_temperature": {
"name": "Target heating temperature"
},
"target_temperature": {
"name": "Target temperature"
},
"temperature_alert": {
"name": "Temperature alert",
"state": {
"alert": "Alert",
"no_alert": "No alert"
}
},
"upper_source_temperature": {
"name": "Upper source temperature"
},
"ventilation_alarm": {
"name": "Ventilation alarm",
"state": {
"ahu_alarm": "AHU alarm",
"bot_alarm": "BOT alarm",
"damaged_exhaust_sensor": "Damaged exhaust sensor",
"damaged_preheater_sensor": "Damaged preheater sensor",
"damaged_supply_and_exhaust_sensors": "Damaged supply and exhaust sensors",
"damaged_supply_sensor": "Damaged supply sensor",
"no_alarm": "No alarm"
}
},
"ventilation_gear": {
"name": "Ventilation gear"
},
"weather_curve": {
"name": "Weather curve"
}
}
}
}

View File

@@ -2,17 +2,10 @@
from datetime import timedelta
from pyecobee import (
ECOBEE_API_KEY,
ECOBEE_PASSWORD,
ECOBEE_REFRESH_TOKEN,
ECOBEE_USERNAME,
Ecobee,
ExpiredTokenError,
)
from pyecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN, Ecobee, ExpiredTokenError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.util import Throttle
@@ -25,19 +18,10 @@ type EcobeeConfigEntry = ConfigEntry[EcobeeData]
async def async_setup_entry(hass: HomeAssistant, entry: EcobeeConfigEntry) -> bool:
"""Set up ecobee via a config entry."""
api_key = entry.data.get(CONF_API_KEY)
username = entry.data.get(CONF_USERNAME)
password = entry.data.get(CONF_PASSWORD)
api_key = entry.data[CONF_API_KEY]
refresh_token = entry.data[CONF_REFRESH_TOKEN]
runtime_data = EcobeeData(
hass,
entry,
api_key=api_key,
username=username,
password=password,
refresh_token=refresh_token,
)
runtime_data = EcobeeData(hass, entry, api_key=api_key, refresh_token=refresh_token)
if not await runtime_data.refresh():
return False
@@ -62,32 +46,14 @@ class EcobeeData:
"""
def __init__(
self,
hass: HomeAssistant,
entry: ConfigEntry,
api_key: str | None = None,
username: str | None = None,
password: str | None = None,
refresh_token: str | None = None,
self, hass: HomeAssistant, entry: ConfigEntry, api_key: str, refresh_token: str
) -> None:
"""Initialize the Ecobee data object."""
self._hass = hass
self.entry = entry
if api_key:
self.ecobee = Ecobee(
config={ECOBEE_API_KEY: api_key, ECOBEE_REFRESH_TOKEN: refresh_token}
)
elif username and password:
self.ecobee = Ecobee(
config={
ECOBEE_USERNAME: username,
ECOBEE_PASSWORD: password,
ECOBEE_REFRESH_TOKEN: refresh_token,
}
)
else:
raise ValueError("No ecobee credentials provided")
self.ecobee = Ecobee(
config={ECOBEE_API_KEY: api_key, ECOBEE_REFRESH_TOKEN: refresh_token}
)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def update(self):
@@ -103,23 +69,12 @@ class EcobeeData:
"""Refresh ecobee tokens and update config entry."""
_LOGGER.debug("Refreshing ecobee tokens and updating config entry")
if await self._hass.async_add_executor_job(self.ecobee.refresh_tokens):
data = {}
if self.ecobee.config.get(ECOBEE_API_KEY):
data = {
CONF_API_KEY: self.ecobee.config[ECOBEE_API_KEY],
CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN],
}
elif self.ecobee.config.get(ECOBEE_USERNAME) and self.ecobee.config.get(
ECOBEE_PASSWORD
):
data = {
CONF_USERNAME: self.ecobee.config[ECOBEE_USERNAME],
CONF_PASSWORD: self.ecobee.config[ECOBEE_PASSWORD],
CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN],
}
self._hass.config_entries.async_update_entry(
self.entry,
data=data,
data={
CONF_API_KEY: self.ecobee.config[ECOBEE_API_KEY],
CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN],
},
)
return True
_LOGGER.error("Error refreshing ecobee tokens")

View File

@@ -2,21 +2,15 @@
from typing import Any
from pyecobee import ECOBEE_API_KEY, ECOBEE_PASSWORD, ECOBEE_USERNAME, Ecobee
from pyecobee import ECOBEE_API_KEY, Ecobee
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import CONF_API_KEY
from .const import CONF_REFRESH_TOKEN, DOMAIN
_USER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
)
_USER_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str})
class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
@@ -33,34 +27,13 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
errors = {}
if user_input is not None:
api_key = user_input.get(CONF_API_KEY)
username = user_input.get(CONF_USERNAME)
password = user_input.get(CONF_PASSWORD)
# Use the user-supplied API key to attempt to obtain a PIN from ecobee.
self._ecobee = Ecobee(config={ECOBEE_API_KEY: user_input[CONF_API_KEY]})
if api_key and not (username or password):
# Use the user-supplied API key to attempt to obtain a PIN from ecobee.
self._ecobee = Ecobee(config={ECOBEE_API_KEY: api_key})
if await self.hass.async_add_executor_job(self._ecobee.request_pin):
# We have a PIN; move to the next step of the flow.
return await self.async_step_authorize()
errors["base"] = "pin_request_failed"
elif username and password and not api_key:
self._ecobee = Ecobee(
config={
ECOBEE_USERNAME: username,
ECOBEE_PASSWORD: password,
}
)
if await self.hass.async_add_executor_job(self._ecobee.refresh_tokens):
config = {
CONF_USERNAME: username,
CONF_PASSWORD: password,
CONF_REFRESH_TOKEN: self._ecobee.refresh_token,
}
return self.async_create_entry(title=DOMAIN, data=config)
errors["base"] = "login_failed"
else:
errors["base"] = "invalid_auth"
if await self.hass.async_add_executor_job(self._ecobee.request_pin):
# We have a PIN; move to the next step of the flow.
return await self.async_step_authorize()
errors["base"] = "pin_request_failed"
return self.async_show_form(
step_id="user",

View File

@@ -4,8 +4,6 @@
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
},
"error": {
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"login_failed": "Error authenticating with ecobee; please verify your credentials are correct.",
"pin_request_failed": "Error requesting PIN from ecobee; please verify API key is correct.",
"token_request_failed": "Error requesting tokens from ecobee; please try again."
},

View File

@@ -28,7 +28,6 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.WATER_HEATER,

View File

@@ -5,7 +5,7 @@ from typing import Any
from pyeconet.equipment import EquipmentType
from pyeconet.equipment.thermostat import (
Thermostat,
ThermostatFanSpeed,
ThermostatFanMode,
ThermostatOperationMode,
)
@@ -16,7 +16,6 @@ from homeassistant.components.climate import (
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_TOP,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
@@ -42,16 +41,13 @@ HA_STATE_TO_ECONET = {
if key != ThermostatOperationMode.EMERGENCY_HEAT
}
ECONET_FAN_SPEED_TO_HA = {
ThermostatFanSpeed.AUTO: FAN_AUTO,
ThermostatFanSpeed.LOW: FAN_LOW,
ThermostatFanSpeed.MEDIUM: FAN_MEDIUM,
ThermostatFanSpeed.HIGH: FAN_HIGH,
ThermostatFanSpeed.MAX: FAN_TOP,
}
HA_FAN_STATE_TO_ECONET_FAN_SPEED = {
value: key for key, value in ECONET_FAN_SPEED_TO_HA.items()
ECONET_FAN_STATE_TO_HA = {
ThermostatFanMode.AUTO: FAN_AUTO,
ThermostatFanMode.LOW: FAN_LOW,
ThermostatFanMode.MEDIUM: FAN_MEDIUM,
ThermostatFanMode.HIGH: FAN_HIGH,
}
HA_FAN_STATE_TO_ECONET = {value: key for key, value in ECONET_FAN_STATE_TO_HA.items()}
SUPPORT_FLAGS_THERMOSTAT = (
ClimateEntityFeature.TARGET_TEMPERATURE
@@ -107,7 +103,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
return self._econet.set_point
@property
def current_humidity(self) -> int | None:
def current_humidity(self) -> int:
"""Return the current humidity."""
return self._econet.humidity
@@ -153,7 +149,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation i.e. heat, cool, mode.
"""Return hvac operation ie. heat, cool, mode.
Needs to be one of HVAC_MODE_*.
"""
@@ -178,35 +174,35 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
@property
def fan_mode(self) -> str:
"""Return the current fan mode."""
econet_fan_speed = self._econet.fan_speed
econet_fan_mode = self._econet.fan_mode
# Remove this after we figure out how to handle med lo and med hi
if econet_fan_speed in [ThermostatFanSpeed.MEDHI, ThermostatFanSpeed.MEDLO]:
econet_fan_speed = ThermostatFanSpeed.MEDIUM
if econet_fan_mode in [ThermostatFanMode.MEDHI, ThermostatFanMode.MEDLO]:
econet_fan_mode = ThermostatFanMode.MEDIUM
_current_fan_speed = FAN_AUTO
if econet_fan_speed is not None:
_current_fan_speed = ECONET_FAN_SPEED_TO_HA[econet_fan_speed]
return _current_fan_speed
_current_fan_mode = FAN_AUTO
if econet_fan_mode is not None:
_current_fan_mode = ECONET_FAN_STATE_TO_HA[econet_fan_mode]
return _current_fan_mode
@property
def fan_modes(self) -> list[str]:
"""Return the fan modes."""
# Remove the MEDLO MEDHI once we figure out how to handle it
return [
ECONET_FAN_SPEED_TO_HA[mode]
for mode in self._econet.fan_speeds
ECONET_FAN_STATE_TO_HA[mode]
for mode in self._econet.fan_modes
# Remove the MEDLO MEDHI once we figure out how to handle it
if mode
not in [
ThermostatFanSpeed.UNKNOWN,
ThermostatFanSpeed.MEDLO,
ThermostatFanSpeed.MEDHI,
ThermostatFanMode.UNKNOWN,
ThermostatFanMode.MEDLO,
ThermostatFanMode.MEDHI,
]
]
def set_fan_mode(self, fan_mode: str) -> None:
"""Set the fan mode."""
self._econet.set_fan_speed(HA_FAN_STATE_TO_ECONET_FAN_SPEED[fan_mode])
self._econet.set_fan_mode(HA_FAN_STATE_TO_ECONET[fan_mode])
@property
def min_temp(self) -> float:

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["paho_mqtt", "pyeconet"],
"requirements": ["pyeconet==0.2.1"]
"requirements": ["pyeconet==0.1.28"]
}

View File

@@ -1,53 +0,0 @@
"""Support for Rheem EcoNet thermostats with variable fan speeds and fan modes."""
from __future__ import annotations
from pyeconet.equipment import EquipmentType
from pyeconet.equipment.thermostat import Thermostat, ThermostatFanMode
from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import EconetConfigEntry
from .entity import EcoNetEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: EconetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the econet thermostat select entity."""
equipment = entry.runtime_data
async_add_entities(
EconetFanModeSelect(thermostat)
for thermostat in equipment[EquipmentType.THERMOSTAT]
if thermostat.supports_fan_mode
)
class EconetFanModeSelect(EcoNetEntity[Thermostat], SelectEntity):
"""Select entity."""
def __init__(self, thermostat: Thermostat) -> None:
"""Initialize EcoNet platform."""
super().__init__(thermostat)
self._attr_name = f"{thermostat.device_name} fan mode"
self._attr_unique_id = (
f"{thermostat.device_id}_{thermostat.device_name}_fan_mode"
)
@property
def options(self) -> list[str]:
"""Return available select options."""
return [e.value for e in self._econet.fan_modes]
@property
def current_option(self) -> str:
"""Return current select option."""
return self._econet.fan_mode.value
def select_option(self, option: str) -> None:
"""Set the selected option."""
self._econet.set_fan_mode(ThermostatFanMode.by_string(option))

View File

@@ -23,20 +23,19 @@ async def async_setup_entry(
entry: EconetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the econet thermostat switch entity."""
"""Set up the ecobee thermostat switch entity."""
equipment = entry.runtime_data
async_add_entities(
EcoNetSwitchAuxHeatOnly(thermostat)
for thermostat in equipment[EquipmentType.THERMOSTAT]
if ThermostatOperationMode.EMERGENCY_HEAT in thermostat.modes
)
class EcoNetSwitchAuxHeatOnly(EcoNetEntity[Thermostat], SwitchEntity):
"""Representation of an aux_heat_only EcoNet switch."""
"""Representation of a aux_heat_only EcoNet switch."""
def __init__(self, thermostat: Thermostat) -> None:
"""Initialize EcoNet platform."""
"""Initialize EcoNet ventilator platform."""
super().__init__(thermostat)
self._attr_name = f"{thermostat.device_name} emergency heat"
self._attr_unique_id = (

View File

@@ -12,7 +12,11 @@ import re
from typing import Any, TypedDict, cast
from fritzconnection import FritzConnection
from fritzconnection.core.exceptions import FritzActionError
from fritzconnection.core.exceptions import (
FritzActionError,
FritzConnectionException,
FritzSecurityError,
)
from fritzconnection.lib.fritzcall import FritzCall
from fritzconnection.lib.fritzhosts import FritzHosts
from fritzconnection.lib.fritzstatus import FritzStatus
@@ -43,7 +47,6 @@ from .const import (
DEFAULT_SSL,
DEFAULT_USERNAME,
DOMAIN,
FRITZ_AUTH_EXCEPTIONS,
FRITZ_EXCEPTIONS,
SCAN_INTERVAL,
MeshRoles,
@@ -422,18 +425,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
hosts_info: list[HostInfo] = []
try:
try:
hosts_attributes = cast(
list[HostAttributes],
await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_attributes
),
hosts_attributes = await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_attributes
)
except FritzActionError:
hosts_info = cast(
list[HostInfo],
await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_info
),
hosts_info = await self.hass.async_add_executor_job(
self.fritz_hosts.get_hosts_info
)
except Exception as ex:
if not self.hass.is_stopping:
@@ -589,7 +586,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
topology := await self.hass.async_add_executor_job(
self.fritz_hosts.get_mesh_topology
)
) or not isinstance(topology, dict):
):
raise Exception("Mesh supported but empty topology reported") # noqa: TRY002
except FritzActionError:
self.mesh_role = MeshRoles.SLAVE
@@ -745,7 +742,7 @@ class AvmWrapper(FritzBoxTools):
**kwargs,
)
)
except FRITZ_AUTH_EXCEPTIONS:
except FritzSecurityError:
_LOGGER.exception(
"Authorization Error: Please check the provided credentials and"
" verify that you can log into the web interface"
@@ -758,6 +755,12 @@ class AvmWrapper(FritzBoxTools):
action_name,
)
return {}
except FritzConnectionException:
_LOGGER.exception(
"Connection Error: Please check the device is properly configured"
" for remote login"
)
return {}
return result
async def async_get_upnp_configuration(self) -> dict[str, Any]:

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
import voluptuous as vol
@@ -71,11 +70,6 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN):
authtoken = await self.auth.async_register()
if authtoken:
_LOGGER.debug("Write config entry for HomematicIP Cloud")
if self.source == "reauth":
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={HMIPC_AUTHTOKEN: authtoken},
)
return self.async_create_entry(
title=self.auth.config[HMIPC_HAPID],
data={
@@ -84,50 +78,11 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN):
HMIPC_NAME: self.auth.config.get(HMIPC_NAME),
},
)
if self.source == "reauth":
errors["base"] = "connection_aborted"
else:
return self.async_abort(reason="connection_aborted")
else:
errors["base"] = "press_the_button"
return self.async_abort(reason="connection_aborted")
errors["base"] = "press_the_button"
return self.async_show_form(step_id="link", errors=errors)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication when the auth token becomes invalid."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth confirmation and start link process."""
errors = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
config = {
HMIPC_HAPID: reauth_entry.data[HMIPC_HAPID],
HMIPC_PIN: user_input.get(HMIPC_PIN),
HMIPC_NAME: reauth_entry.data.get(HMIPC_NAME),
}
self.auth = HomematicipAuth(self.hass, config)
connected = await self.auth.async_setup()
if connected:
return await self.async_step_link()
errors["base"] = "invalid_sgtin_or_pin"
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Optional(HMIPC_PIN): str,
}
),
errors=errors,
)
async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult:
"""Import a new access point as a config entry."""
hapid = import_data[HMIPC_HAPID].replace("-", "").upper()

View File

@@ -1,27 +0,0 @@
"""Diagnostics support for HomematicIP Cloud."""
from __future__ import annotations
import json
from typing import Any
from homematicip.base.helpers import handle_config
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from .hap import HomematicIPConfigEntry
TO_REDACT_CONFIG = {"city", "latitude", "longitude", "refreshToken"}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: HomematicIPConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
hap = config_entry.runtime_data
json_state = await hap.home.download_configuration_async()
anonymized = handle_config(json_state, anonymize=True)
config = json.loads(anonymized)
return async_redact_data(config, TO_REDACT_CONFIG)

View File

@@ -12,10 +12,7 @@ from homematicip.auth import Auth
from homematicip.base.enums import EventType
from homematicip.connection.connection_context import ConnectionContextBuilder
from homematicip.connection.rest_connection import RestConnection
from homematicip.exceptions.connection_exceptions import (
HmipAuthenticationError,
HmipConnectionError,
)
from homematicip.exceptions.connection_exceptions import HmipConnectionError
import homeassistant
from homeassistant.config_entries import ConfigEntry
@@ -195,12 +192,6 @@ class HomematicipHAP:
try:
await self.get_state()
break
except HmipAuthenticationError:
_LOGGER.error(
"Authentication error from HomematicIP Cloud, triggering reauth"
)
self.config_entry.async_start_reauth(self.hass)
break
except HmipConnectionError as err:
_LOGGER.warning(
"Get_state failed, retrying in %s seconds: %s", delay, err

View File

@@ -55,7 +55,7 @@ async def async_setup_entry(
entities: list[HomematicipGenericEntity] = []
entities.extend(
HomematicipColorLight(hap, d, ch.index)
HomematicipLightHS(hap, d, ch.index)
for d in hap.home.devices
for ch in d.functionalChannels
if ch.functionalChannelType == FunctionalChannelType.UNIVERSAL_LIGHT_CHANNEL
@@ -136,32 +136,16 @@ class HomematicipLight(HomematicipGenericEntity, LightEntity):
await self._device.turn_off_async()
class HomematicipColorLight(HomematicipGenericEntity, LightEntity):
"""Representation of the HomematicIP color light."""
class HomematicipLightHS(HomematicipGenericEntity, LightEntity):
"""Representation of the HomematicIP light with HS color mode."""
_attr_color_mode = ColorMode.HS
_attr_supported_color_modes = {ColorMode.HS}
def __init__(self, hap: HomematicipHAP, device: Device, channel_index: int) -> None:
"""Initialize the light entity."""
super().__init__(hap, device, channel=channel_index, is_multi_channel=True)
def _supports_color(self) -> bool:
"""Return true if device supports hue/saturation color control."""
channel = self.get_channel_or_raise()
return channel.hue is not None and channel.saturationLevel is not None
@property
def color_mode(self) -> ColorMode:
"""Return the color mode of the light."""
if self._supports_color():
return ColorMode.HS
return ColorMode.BRIGHTNESS
@property
def supported_color_modes(self) -> set[ColorMode]:
"""Return the supported color modes."""
if self._supports_color():
return {ColorMode.HS}
return {ColorMode.BRIGHTNESS}
@property
def is_on(self) -> bool:
"""Return true if light is on."""
@@ -188,26 +172,18 @@ class HomematicipColorLight(HomematicipGenericEntity, LightEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the light on."""
channel = self.get_channel_or_raise()
hs_color = kwargs.get(ATTR_HS_COLOR, (0.0, 0.0))
hue = hs_color[0] % 360.0
saturation = hs_color[1] / 100.0
dim_level = round(kwargs.get(ATTR_BRIGHTNESS, 255) / 255.0, 2)
if ATTR_HS_COLOR not in kwargs:
hue = channel.hue
saturation = channel.saturationLevel
if ATTR_BRIGHTNESS not in kwargs:
# If no brightness is set, use the current brightness
dim_level = channel.dimLevel or 1.0
# Use dim-only method for monochrome mode (hue/saturation not supported)
if not self._supports_color():
await channel.set_dim_level_async(dim_level=dim_level)
return
# Full color mode with hue/saturation
if ATTR_HS_COLOR in kwargs:
hs_color = kwargs[ATTR_HS_COLOR]
hue = hs_color[0] % 360.0
saturation = hs_color[1] / 100.0
else:
hue = channel.hue
saturation = channel.saturationLevel
await channel.set_hue_saturation_dim_level_async(
hue=hue, saturation_level=saturation, dim_level=dim_level
)

View File

@@ -3,11 +3,9 @@
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"connection_aborted": "[%key:common::config_flow::error::cannot_connect%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"connection_aborted": "Registration failed, please try again.",
"invalid_sgtin_or_pin": "Invalid SGTIN or PIN code, please try again.",
"press_the_button": "Please press the blue button.",
"register_failed": "Failed to register, please try again.",
@@ -26,13 +24,6 @@
"link": {
"description": "Press the blue button on the access point and the **Submit** button to register Homematic IP with Home Assistant.\n\n![Location of button on bridge](/static/images/config_flows/config_homematicip_cloud.png)",
"title": "Link access point"
},
"reauth_confirm": {
"data": {
"pin": "[%key:common::config_flow::data::pin%]"
},
"description": "The authentication token for your HomematicIP access point is no longer valid. Press **Submit** and then press the blue button on your access point to re-register.",
"title": "Re-authenticate HomematicIP access point"
}
}
},

View File

@@ -10,7 +10,6 @@ override_schedule:
selector:
duration:
enable_day: true
enable_second: false
override_mode:
required: true
example: "mow"
@@ -33,7 +32,6 @@ override_schedule_work_area:
selector:
duration:
enable_day: true
enable_second: false
work_area_id:
required: true
example: "123"

View File

@@ -511,7 +511,7 @@
"description": "Lets the mower either mow or park for a given duration, overriding all schedules.",
"fields": {
"duration": {
"description": "Minimum: 1 minute, maximum: 42 days.",
"description": "Minimum: 1 minute, maximum: 42 days, seconds will be ignored.",
"name": "Duration"
},
"override_mode": {

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["imgw_pib==2.0.2"]
"requirements": ["imgw_pib==2.0.1"]
}

View File

@@ -24,7 +24,6 @@
"hydrological_alert": {
"name": "Hydrological alert",
"state": {
"exceeding_the_alarm_level": "Exceeding the alarm level",
"exceeding_the_warning_level": "Exceeding the warning level",
"hydrological_drought": "Hydrological drought",
"no_alert": "No alert",

View File

@@ -7,12 +7,7 @@ from homeassistant.core import HomeAssistant
from .coordinator import IndevoltConfigEntry, IndevoltCoordinator
PLATFORMS: list[Platform] = [
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
]
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
async def async_setup_entry(hass: HomeAssistant, entry: IndevoltConfigEntry) -> bool:

View File

@@ -1,111 +0,0 @@
"""Select platform for Indevolt integration."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Final
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import IndevoltConfigEntry
from .coordinator import IndevoltCoordinator
from .entity import IndevoltEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IndevoltSelectEntityDescription(SelectEntityDescription):
"""Custom entity description class for Indevolt select entities."""
read_key: str
write_key: str
value_to_option: dict[int, str]
unavailable_values: list[int] = field(default_factory=list)
generation: list[int] = field(default_factory=lambda: [1, 2])
SELECTS: Final = (
IndevoltSelectEntityDescription(
key="energy_mode",
translation_key="energy_mode",
read_key="7101",
write_key="47005",
value_to_option={
1: "self_consumed_prioritized",
4: "real_time_control",
5: "charge_discharge_schedule",
},
unavailable_values=[0],
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: IndevoltConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the select platform for Indevolt."""
coordinator = entry.runtime_data
device_gen = coordinator.generation
# Select initialization
async_add_entities(
IndevoltSelectEntity(coordinator=coordinator, description=description)
for description in SELECTS
if device_gen in description.generation
)
class IndevoltSelectEntity(IndevoltEntity, SelectEntity):
"""Represents a select entity for Indevolt devices."""
entity_description: IndevoltSelectEntityDescription
def __init__(
self,
coordinator: IndevoltCoordinator,
description: IndevoltSelectEntityDescription,
) -> None:
"""Initialize the Indevolt select entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{self.serial_number}_{description.key}"
self._attr_options = list(description.value_to_option.values())
self._option_to_value = {v: k for k, v in description.value_to_option.items()}
@property
def current_option(self) -> str | None:
"""Return the currently selected option."""
raw_value = self.coordinator.data.get(self.entity_description.read_key)
if raw_value is None:
return None
return self.entity_description.value_to_option.get(raw_value)
@property
def available(self) -> bool:
"""Return False when the device is in a mode that cannot be selected."""
if not super().available:
return False
raw_value = self.coordinator.data.get(self.entity_description.read_key)
return raw_value not in self.entity_description.unavailable_values
async def async_select_option(self, option: str) -> None:
"""Select a new option."""
value = self._option_to_value[option]
success = await self.coordinator.async_push_data(
self.entity_description.write_key, value
)
if success:
await self.coordinator.async_request_refresh()
else:
raise HomeAssistantError(f"Failed to set option {option} for {self.name}")

View File

@@ -37,16 +37,6 @@
"name": "Max AC output power"
}
},
"select": {
"energy_mode": {
"name": "[%key:component::indevolt::entity::sensor::energy_mode::name%]",
"state": {
"charge_discharge_schedule": "[%key:component::indevolt::entity::sensor::energy_mode::state::charge_discharge_schedule%]",
"real_time_control": "[%key:component::indevolt::entity::sensor::energy_mode::state::real_time_control%]",
"self_consumed_prioritized": "[%key:component::indevolt::entity::sensor::energy_mode::state::self_consumed_prioritized%]"
}
}
},
"sensor": {
"ac_input_power": {
"name": "AC input power"

View File

@@ -3,8 +3,6 @@
from __future__ import annotations
import asyncio
from datetime import datetime
import logging
from pyliebherrhomeapi import LiebherrClient
from pyliebherrhomeapi.exceptions import (
@@ -16,13 +14,8 @@ from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import DEVICE_SCAN_INTERVAL, DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator, LiebherrData
_LOGGER = logging.getLogger(__name__)
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
PLATFORMS: list[Platform] = [
Platform.NUMBER,
@@ -49,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) ->
raise ConfigEntryNotReady(f"Failed to connect to Liebherr API: {err}") from err
# Create a coordinator for each device (may be empty if no devices)
data = LiebherrData(client=client)
coordinators: dict[str, LiebherrCoordinator] = {}
for device in devices:
coordinator = LiebherrCoordinator(
hass=hass,
@@ -57,61 +50,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) ->
client=client,
device_id=device.device_id,
)
data.coordinators[device.device_id] = coordinator
coordinators[device.device_id] = coordinator
await asyncio.gather(
*(
coordinator.async_config_entry_first_refresh()
for coordinator in data.coordinators.values()
for coordinator in coordinators.values()
)
)
# Store runtime data
entry.runtime_data = data
# Store coordinators in runtime data
entry.runtime_data = coordinators
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Schedule periodic scan for new devices
async def _async_scan_for_new_devices(_now: datetime) -> None:
"""Scan for new devices added to the account."""
try:
devices = await client.get_devices()
except LiebherrAuthenticationError, LiebherrConnectionError:
_LOGGER.debug("Failed to scan for new devices")
return
except Exception:
_LOGGER.exception("Unexpected error scanning for new devices")
return
new_coordinators: list[LiebherrCoordinator] = []
for device in devices:
if device.device_id not in data.coordinators:
coordinator = LiebherrCoordinator(
hass=hass,
config_entry=entry,
client=client,
device_id=device.device_id,
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
_LOGGER.debug("Failed to set up new device %s", device.device_id)
continue
data.coordinators[device.device_id] = coordinator
new_coordinators.append(coordinator)
if new_coordinators:
async_dispatcher_send(
hass,
f"{DOMAIN}_new_device_{entry.entry_id}",
new_coordinators,
)
entry.async_on_unload(
async_track_time_interval(
hass, _async_scan_for_new_devices, DEVICE_SCAN_INTERVAL
)
)
return True

View File

@@ -6,6 +6,4 @@ from typing import Final
DOMAIN: Final = "liebherr"
MANUFACTURER: Final = "Liebherr"
SCAN_INTERVAL: Final = timedelta(seconds=60)
DEVICE_SCAN_INTERVAL: Final = timedelta(minutes=5)
REFRESH_DELAY: Final = timedelta(seconds=5)

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from dataclasses import dataclass, field
from datetime import timedelta
import logging
from pyliebherrhomeapi import (
@@ -18,20 +18,13 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
from .const import DOMAIN
type LiebherrConfigEntry = ConfigEntry[dict[str, LiebherrCoordinator]]
_LOGGER = logging.getLogger(__name__)
@dataclass
class LiebherrData:
"""Runtime data for the Liebherr integration."""
client: LiebherrClient
coordinators: dict[str, LiebherrCoordinator] = field(default_factory=dict)
type LiebherrConfigEntry = ConfigEntry[LiebherrData]
SCAN_INTERVAL = timedelta(seconds=60)
class LiebherrCoordinator(DataUpdateCoordinator[DeviceState]):

View File

@@ -29,6 +29,6 @@ async def async_get_config_entry_diagnostics(
},
"data": asdict(coordinator.data),
}
for device_id, coordinator in entry.runtime_data.coordinators.items()
for device_id, coordinator in entry.runtime_data.items()
},
}

View File

@@ -16,11 +16,9 @@ from homeassistant.components.number import (
NumberEntityDescription,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import LiebherrZoneEntity
@@ -55,41 +53,22 @@ NUMBER_TYPES: tuple[LiebherrNumberEntityDescription, ...] = (
)
def _create_number_entities(
coordinators: list[LiebherrCoordinator],
) -> list[LiebherrNumber]:
"""Create number entities for the given coordinators."""
return [
LiebherrNumber(
coordinator=coordinator,
zone_id=temp_control.zone_id,
description=description,
)
for coordinator in coordinators
for temp_control in coordinator.data.get_temperature_controls().values()
for description in NUMBER_TYPES
]
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr number entities."""
coordinators = entry.runtime_data
async_add_entities(
_create_number_entities(list(entry.runtime_data.coordinators.values()))
)
@callback
def _async_new_device(coordinators: list[LiebherrCoordinator]) -> None:
"""Add number entities for new devices."""
async_add_entities(_create_number_entities(coordinators))
entry.async_on_unload(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_device_{entry.entry_id}", _async_new_device
LiebherrNumber(
coordinator=coordinator,
zone_id=temp_control.zone_id,
description=description,
)
for coordinator in coordinators.values()
for temp_control in coordinator.data.get_temperature_controls().values()
for description in NUMBER_TYPES
)

View File

@@ -53,7 +53,7 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:

View File

@@ -18,11 +18,9 @@ from pyliebherrhomeapi import (
)
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import ZONE_POSITION_MAP, LiebherrEntity
@@ -111,13 +109,15 @@ SELECT_TYPES: list[LiebherrSelectEntityDescription] = [
]
def _create_select_entities(
coordinators: list[LiebherrCoordinator],
) -> list[LiebherrSelectEntity]:
"""Create select entities for the given coordinators."""
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr select entities."""
entities: list[LiebherrSelectEntity] = []
for coordinator in coordinators:
for coordinator in entry.runtime_data.values():
has_multiple_zones = len(coordinator.data.get_temperature_controls()) > 1
for control in coordinator.data.controls:
@@ -137,29 +137,7 @@ def _create_select_entities(
)
)
return entities
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr select entities."""
async_add_entities(
_create_select_entities(list(entry.runtime_data.coordinators.values()))
)
@callback
def _async_new_device(coordinators: list[LiebherrCoordinator]) -> None:
"""Add select entities for new devices."""
async_add_entities(_create_select_entities(coordinators))
entry.async_on_unload(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_device_{entry.entry_id}", _async_new_device
)
)
async_add_entities(entities)
class LiebherrSelectEntity(LiebherrEntity, SelectEntity):

View File

@@ -14,12 +14,10 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import LiebherrZoneEntity
@@ -50,41 +48,22 @@ SENSOR_TYPES: tuple[LiebherrSensorEntityDescription, ...] = (
)
def _create_sensor_entities(
coordinators: list[LiebherrCoordinator],
) -> list[LiebherrSensor]:
"""Create sensor entities for the given coordinators."""
return [
LiebherrSensor(
coordinator=coordinator,
zone_id=temp_control.zone_id,
description=description,
)
for coordinator in coordinators
for temp_control in coordinator.data.get_temperature_controls().values()
for description in SENSOR_TYPES
]
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr sensor entities."""
coordinators = entry.runtime_data
async_add_entities(
_create_sensor_entities(list(entry.runtime_data.coordinators.values()))
)
@callback
def _async_new_device(coordinators: list[LiebherrCoordinator]) -> None:
"""Add sensor entities for new devices."""
async_add_entities(_create_sensor_entities(coordinators))
entry.async_on_unload(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_device_{entry.entry_id}", _async_new_device
LiebherrSensor(
coordinator=coordinator,
zone_id=temp_control.zone_id,
description=description,
)
for coordinator in coordinators.values()
for temp_control in coordinator.data.get_temperature_controls().values()
for description in SENSOR_TYPES
)

View File

@@ -15,11 +15,9 @@ from pyliebherrhomeapi.const import (
)
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import ZONE_POSITION_MAP, LiebherrEntity
@@ -92,13 +90,15 @@ DEVICE_SWITCH_TYPES: dict[str, LiebherrDeviceSwitchEntityDescription] = {
}
def _create_switch_entities(
coordinators: list[LiebherrCoordinator],
) -> list[LiebherrDeviceSwitch | LiebherrZoneSwitch]:
"""Create switch entities for the given coordinators."""
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr switch entities."""
entities: list[LiebherrDeviceSwitch | LiebherrZoneSwitch] = []
for coordinator in coordinators:
for coordinator in entry.runtime_data.values():
has_multiple_zones = len(coordinator.data.get_temperature_controls()) > 1
for control in coordinator.data.controls:
@@ -127,29 +127,7 @@ def _create_switch_entities(
)
)
return entities
async def async_setup_entry(
hass: HomeAssistant,
entry: LiebherrConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Liebherr switch entities."""
async_add_entities(
_create_switch_entities(list(entry.runtime_data.coordinators.values()))
)
@callback
def _async_new_device(coordinators: list[LiebherrCoordinator]) -> None:
"""Add switch entities for new devices."""
async_add_entities(_create_switch_entities(coordinators))
entry.async_on_unload(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_device_{entry.entry_id}", _async_new_device
)
)
async_add_entities(entities)
class LiebherrDeviceSwitch(LiebherrEntity, SwitchEntity):

View File

@@ -1,30 +0,0 @@
"""Diagnostics support for Met.no integration."""
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from .coordinator import MetWeatherConfigEntry
TO_REDACT = [
CONF_LATITUDE,
CONF_LONGITUDE,
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: MetWeatherConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator_data = entry.runtime_data.data
return {
"entry_data": async_redact_data(entry.data, TO_REDACT),
"data": {
"current_weather_data": coordinator_data.current_weather_data,
"daily_forecast": coordinator_data.daily_forecast,
"hourly_forecast": coordinator_data.hourly_forecast,
},
}

View File

@@ -120,31 +120,6 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered_name: str | None = None
self._pending_host: str | None = None
async def _async_validate_host(
self,
host: str,
errors: dict[str, str],
) -> tuple[dict[str, Any] | None, bool]:
"""Validate host connection and populate errors dict on failure.
Returns (info, needs_auth). When needs_auth is True, the caller
should store the host and redirect to the appropriate auth step.
"""
try:
return await validate_input(self.hass, host), False
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
return None, True
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
return None, False
async def _async_validate_credentials(
self,
host: str,
@@ -181,11 +156,21 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
except vol.Invalid:
errors["base"] = "cannot_connect"
else:
info, needs_auth = await self._async_validate_host(host, errors)
if needs_auth:
try:
info = await validate_input(self.hass, host)
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
self._pending_host = host
return await self.async_step_user_auth()
if info:
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
info["serial"], raise_on_progress=False
)
@@ -213,8 +198,8 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
if info := await self._async_validate_credentials(
self._pending_host,
errors,
username=user_input.get(CONF_USERNAME),
password=user_input.get(CONF_PASSWORD),
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
):
await self.async_set_unique_id(info["serial"], raise_on_progress=False)
self._abort_if_unique_id_configured()
@@ -222,8 +207,8 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
title=info["title"],
data={
CONF_HOST: self._pending_host,
CONF_USERNAME: user_input.get(CONF_USERNAME),
CONF_PASSWORD: user_input.get(CONF_PASSWORD),
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
@@ -253,8 +238,8 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
if info := await self._async_validate_credentials(
reauth_entry.data[CONF_HOST],
errors,
username=user_input.get(CONF_USERNAME),
password=user_input.get(CONF_PASSWORD),
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
):
await self.async_set_unique_id(info["serial"], raise_on_progress=False)
self._abort_if_unique_id_mismatch()
@@ -272,83 +257,6 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
try:
host = _normalize_host(user_input[CONF_HOST])
except vol.Invalid:
errors["base"] = "cannot_connect"
else:
info, needs_auth = await self._async_validate_host(host, errors)
if needs_auth:
self._pending_host = host
return await self.async_step_reconfigure_auth()
if info:
await self.async_set_unique_id(
info["serial"], raise_on_progress=False
)
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={CONF_HOST: host},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA,
reconfigure_entry.data,
),
errors=errors,
)
async def async_step_reconfigure_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration authentication step."""
errors: dict[str, str] = {}
if TYPE_CHECKING:
assert self._pending_host is not None
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
username = user_input.get(CONF_USERNAME)
password = user_input.get(CONF_PASSWORD)
if info := await self._async_validate_credentials(
self._pending_host,
errors,
username=username,
password=password,
):
await self.async_set_unique_id(info["serial"], raise_on_progress=False)
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={
CONF_HOST: self._pending_host,
CONF_USERNAME: username,
CONF_PASSWORD: password,
},
)
return self.async_show_form(
step_id="reconfigure_auth",
data_schema=self.add_suggested_values_to_schema(
STEP_AUTH_DATA_SCHEMA,
reconfigure_entry.data,
),
errors=errors,
description_placeholders={
"device_ip": self._pending_host,
},
)
async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
@@ -413,13 +321,21 @@ class NRGkickConfigFlow(ConfigFlow, domain=DOMAIN):
assert self._discovered_name is not None
if user_input is not None:
info, needs_auth = await self._async_validate_host(
self._discovered_host, errors
)
if needs_auth:
try:
info = await validate_input(self.hass, self._discovered_host)
except NRGkickApiClientApiDisabledError:
errors["base"] = "json_api_disabled"
except NRGkickApiClientAuthenticationError:
self._pending_host = self._discovered_host
return await self.async_step_user_auth()
if info:
except NRGkickApiClientInvalidResponseError:
errors["base"] = "invalid_response"
except NRGkickApiClientCommunicationError:
errors["base"] = "cannot_connect"
except NRGkickApiClientError:
_LOGGER.exception("Unexpected error")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=info["title"], data={CONF_HOST: self._discovered_host}
)

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/nrgkick",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["nrgkick-api==1.7.1"],
"zeroconf": ["_nrgkick._tcp.local."]
}

View File

@@ -41,7 +41,7 @@ rules:
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
test-coverage: done
@@ -68,7 +68,7 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: exempt

View File

@@ -6,7 +6,6 @@
"json_api_disabled": "JSON API is disabled on the device. Enable it in the NRGkick mobile app under Extended \u2192 Local API \u2192 API Variants.",
"no_serial_number": "Device does not provide a serial number",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "The device does not match the previous device"
},
"error": {
@@ -29,26 +28,6 @@
},
"description": "Reauthenticate with your NRGkick device.\n\nGet your username and password in the NRGkick mobile app:\n1. Open the NRGkick mobile app \u2192 Extended \u2192 Local API\n2. Under Authentication (JSON), check or set your username and password"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::nrgkick::config::step::user::data_description::host%]"
},
"description": "Reconfigure your NRGkick device. This allows you to change the IP address or hostname of your NRGkick device."
},
"reconfigure_auth": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"password": "[%key:component::nrgkick::config::step::user_auth::data_description::password%]",
"username": "[%key:component::nrgkick::config::step::user_auth::data_description::username%]"
},
"description": "[%key:component::nrgkick::config::step::user_auth::description%]"
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

View File

@@ -50,7 +50,6 @@ from .const import (
CONF_TOP_P,
DEFAULT_AI_TASK_NAME,
DEFAULT_NAME,
DEFAULT_STT_NAME,
DEFAULT_TTS_NAME,
DOMAIN,
LOGGER,
@@ -58,7 +57,6 @@ from .const import (
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_REASONING_EFFORT,
RECOMMENDED_STT_OPTIONS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_P,
RECOMMENDED_TTS_OPTIONS,
@@ -68,7 +66,7 @@ from .entity import async_prepare_files_for_prompt
SERVICE_GENERATE_IMAGE = "generate_image"
SERVICE_GENERATE_CONTENT = "generate_content"
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION, Platform.STT, Platform.TTS)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION, Platform.TTS)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
type OpenAIConfigEntry = ConfigEntry[openai.AsyncClient]
@@ -482,10 +480,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) ->
_add_tts_subentry(hass, entry)
hass.config_entries.async_update_entry(entry, minor_version=5)
if entry.version == 2 and entry.minor_version == 5:
_add_stt_subentry(hass, entry)
hass.config_entries.async_update_entry(entry, minor_version=6)
LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)
@@ -506,19 +500,6 @@ def _add_ai_task_subentry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> None
)
def _add_stt_subentry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> None:
"""Add STT subentry to the config entry."""
hass.config_entries.async_add_subentry(
entry,
ConfigSubentry(
data=MappingProxyType(RECOMMENDED_STT_OPTIONS),
subentry_type="stt",
title=DEFAULT_STT_NAME,
unique_id=None,
),
)
def _add_tts_subentry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> None:
"""Add TTS subentry to the config entry."""
hass.config_entries.async_add_subentry(

View File

@@ -68,8 +68,6 @@ from .const import (
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DEFAULT_STT_NAME,
DEFAULT_STT_PROMPT,
DEFAULT_TTS_NAME,
DOMAIN,
RECOMMENDED_AI_TASK_OPTIONS,
@@ -80,8 +78,6 @@ from .const import (
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_REASONING_EFFORT,
RECOMMENDED_REASONING_SUMMARY,
RECOMMENDED_STT_MODEL,
RECOMMENDED_STT_OPTIONS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_P,
RECOMMENDED_TTS_OPTIONS,
@@ -114,14 +110,14 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
client = openai.AsyncOpenAI(
api_key=data[CONF_API_KEY], http_client=get_async_client(hass)
)
await client.models.list(timeout=10.0)
await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list)
class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for OpenAI Conversation."""
VERSION = 2
MINOR_VERSION = 6
MINOR_VERSION = 5
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -162,12 +158,6 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
{
"subentry_type": "stt",
"data": RECOMMENDED_STT_OPTIONS,
"title": DEFAULT_STT_NAME,
"unique_id": None,
},
{
"subentry_type": "tts",
"data": RECOMMENDED_TTS_OPTIONS,
@@ -214,7 +204,6 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
return {
"conversation": OpenAISubentryFlowHandler,
"ai_task_data": OpenAISubentryFlowHandler,
"stt": OpenAISubentrySTTFlowHandler,
"tts": OpenAISubentryTTSFlowHandler,
}
@@ -606,95 +595,6 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
return location_data
class OpenAISubentrySTTFlowHandler(ConfigSubentryFlow):
"""Flow for managing OpenAI STT subentries."""
options: dict[str, Any]
@property
def _is_new(self) -> bool:
"""Return if this is a new subentry."""
return self.source == "user"
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Add a subentry."""
self.options = RECOMMENDED_STT_OPTIONS.copy()
return await self.async_step_init()
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle reconfiguration of a subentry."""
self.options = self._get_reconfigure_subentry().data.copy()
return await self.async_step_init()
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Manage initial options."""
# abort if entry is not loaded
if self._get_entry().state != ConfigEntryState.LOADED:
return self.async_abort(reason="entry_not_loaded")
options = self.options
errors: dict[str, str] = {}
step_schema: VolDictType = {}
if self._is_new:
step_schema[vol.Required(CONF_NAME, default=DEFAULT_STT_NAME)] = str
step_schema.update(
{
vol.Optional(
CONF_PROMPT,
description={
"suggested_value": options.get(CONF_PROMPT, DEFAULT_STT_PROMPT)
},
): TextSelector(
TextSelectorConfig(multiline=True, type=TextSelectorType.TEXT)
),
vol.Optional(
CONF_CHAT_MODEL, default=RECOMMENDED_STT_MODEL
): SelectSelector(
SelectSelectorConfig(
options=[
"gpt-4o-transcribe",
"gpt-4o-mini-transcribe",
"whisper-1",
],
mode=SelectSelectorMode.DROPDOWN,
custom_value=True,
)
),
}
)
if user_input is not None:
options.update(user_input)
if not errors:
if self._is_new:
return self.async_create_entry(
title=options.pop(CONF_NAME),
data=options,
)
return self.async_update_and_abort(
self._get_entry(),
self._get_reconfigure_subentry(),
data=options,
)
return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(step_schema), options
),
errors=errors,
)
class OpenAISubentryTTSFlowHandler(ConfigSubentryFlow):
"""Flow for managing OpenAI TTS subentries."""

View File

@@ -1,7 +1,6 @@
"""Constants for the OpenAI Conversation integration."""
import logging
from typing import Any
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.helpers import llm
@@ -11,7 +10,6 @@ LOGGER: logging.Logger = logging.getLogger(__package__)
DEFAULT_CONVERSATION_NAME = "OpenAI Conversation"
DEFAULT_AI_TASK_NAME = "OpenAI AI Task"
DEFAULT_STT_NAME = "OpenAI STT"
DEFAULT_TTS_NAME = "OpenAI TTS"
DEFAULT_NAME = "OpenAI Conversation"
@@ -42,7 +40,6 @@ RECOMMENDED_IMAGE_MODEL = "gpt-image-1.5"
RECOMMENDED_MAX_TOKENS = 3000
RECOMMENDED_REASONING_EFFORT = "low"
RECOMMENDED_REASONING_SUMMARY = "auto"
RECOMMENDED_STT_MODEL = "gpt-4o-mini-transcribe"
RECOMMENDED_TEMPERATURE = 1.0
RECOMMENDED_TOP_P = 1.0
RECOMMENDED_TTS_SPEED = 1.0
@@ -51,9 +48,6 @@ RECOMMENDED_WEB_SEARCH = False
RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE = "medium"
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
RECOMMENDED_WEB_SEARCH_INLINE_CITATIONS = False
DEFAULT_STT_PROMPT = (
"The following conversation is a smart home user talking to Home Assistant."
)
UNSUPPORTED_MODELS: list[str] = [
"o1-mini",
@@ -114,7 +108,6 @@ RECOMMENDED_CONVERSATION_OPTIONS = {
RECOMMENDED_AI_TASK_OPTIONS = {
CONF_RECOMMENDED: True,
}
RECOMMENDED_STT_OPTIONS: dict[str, Any] = {}
RECOMMENDED_TTS_OPTIONS = {
CONF_PROMPT: "",
CONF_CHAT_MODEL: "gpt-4o-mini-tts",

View File

@@ -92,7 +92,6 @@ from .const import (
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_REASONING_EFFORT,
RECOMMENDED_REASONING_SUMMARY,
RECOMMENDED_STT_MODEL,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_P,
RECOMMENDED_VERBOSITY,
@@ -472,12 +471,7 @@ class OpenAIBaseLLMEntity(Entity):
identifiers={(DOMAIN, subentry.subentry_id)},
name=subentry.title,
manufacturer="OpenAI",
model=subentry.data.get(
CONF_CHAT_MODEL,
RECOMMENDED_CHAT_MODEL
if subentry.subentry_type != "stt"
else RECOMMENDED_STT_MODEL,
),
model=subentry.data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
entry_type=dr.DeviceEntryType.SERVICE,
)

View File

@@ -146,30 +146,6 @@
}
}
},
"stt": {
"abort": {
"entry_not_loaded": "[%key:component::openai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "Speech-to-text",
"initiate_flow": {
"reconfigure": "Reconfigure speech-to-text service",
"user": "Add speech-to-text service"
},
"step": {
"init": {
"data": {
"chat_model": "Model",
"name": "[%key:common::config_flow::data::name%]",
"prompt": "[%key:common::config_flow::data::prompt%]"
},
"data_description": {
"chat_model": "The model to use to transcribe speech.",
"prompt": "Use this prompt to improve the quality of the transcripts. Translate to the pipeline language for best results. See the documentation for more details."
}
}
}
},
"tts": {
"abort": {
"entry_not_loaded": "[%key:component::openai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",

View File

@@ -1,196 +0,0 @@
"""Speech to text support for OpenAI."""
from __future__ import annotations
from collections.abc import AsyncIterable
import io
import logging
from typing import TYPE_CHECKING
import wave
from openai import OpenAIError
from homeassistant.components import stt
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
CONF_CHAT_MODEL,
CONF_PROMPT,
DEFAULT_STT_PROMPT,
RECOMMENDED_STT_MODEL,
)
from .entity import OpenAIBaseLLMEntity
if TYPE_CHECKING:
from . import OpenAIConfigEntry
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: OpenAIConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up STT entities."""
for subentry in config_entry.subentries.values():
if subentry.subentry_type != "stt":
continue
async_add_entities(
[OpenAISTTEntity(config_entry, subentry)],
config_subentry_id=subentry.subentry_id,
)
class OpenAISTTEntity(stt.SpeechToTextEntity, OpenAIBaseLLMEntity):
"""OpenAI Speech to text entity."""
@property
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""
# https://developers.openai.com/api/docs/guides/speech-to-text#supported-languages
# The model may also transcribe the audio in other languages but with lower quality
return [
"af-ZA", # Afrikaans
"ar-SA", # Arabic
"hy-AM", # Armenian
"az-AZ", # Azerbaijani
"be-BY", # Belarusian
"bs-BA", # Bosnian
"bg-BG", # Bulgarian
"ca-ES", # Catalan
"zh-CN", # Chinese (Mandarin)
"hr-HR", # Croatian
"cs-CZ", # Czech
"da-DK", # Danish
"nl-NL", # Dutch
"en-US", # English
"et-EE", # Estonian
"fi-FI", # Finnish
"fr-FR", # French
"gl-ES", # Galician
"de-DE", # German
"el-GR", # Greek
"he-IL", # Hebrew
"hi-IN", # Hindi
"hu-HU", # Hungarian
"is-IS", # Icelandic
"id-ID", # Indonesian
"it-IT", # Italian
"ja-JP", # Japanese
"kn-IN", # Kannada
"kk-KZ", # Kazakh
"ko-KR", # Korean
"lv-LV", # Latvian
"lt-LT", # Lithuanian
"mk-MK", # Macedonian
"ms-MY", # Malay
"mr-IN", # Marathi
"mi-NZ", # Maori
"ne-NP", # Nepali
"no-NO", # Norwegian
"fa-IR", # Persian
"pl-PL", # Polish
"pt-PT", # Portuguese
"ro-RO", # Romanian
"ru-RU", # Russian
"sr-RS", # Serbian
"sk-SK", # Slovak
"sl-SI", # Slovenian
"es-ES", # Spanish
"sw-KE", # Swahili
"sv-SE", # Swedish
"fil-PH", # Tagalog (Filipino)
"ta-IN", # Tamil
"th-TH", # Thai
"tr-TR", # Turkish
"uk-UA", # Ukrainian
"ur-PK", # Urdu
"vi-VN", # Vietnamese
"cy-GB", # Welsh
]
@property
def supported_formats(self) -> list[stt.AudioFormats]:
"""Return a list of supported formats."""
# https://developers.openai.com/api/docs/guides/speech-to-text#transcriptions
return [stt.AudioFormats.WAV, stt.AudioFormats.OGG]
@property
def supported_codecs(self) -> list[stt.AudioCodecs]:
"""Return a list of supported codecs."""
return [stt.AudioCodecs.PCM, stt.AudioCodecs.OPUS]
@property
def supported_bit_rates(self) -> list[stt.AudioBitRates]:
"""Return a list of supported bit rates."""
return [
stt.AudioBitRates.BITRATE_8,
stt.AudioBitRates.BITRATE_16,
stt.AudioBitRates.BITRATE_24,
stt.AudioBitRates.BITRATE_32,
]
@property
def supported_sample_rates(self) -> list[stt.AudioSampleRates]:
"""Return a list of supported sample rates."""
return [
stt.AudioSampleRates.SAMPLERATE_8000,
stt.AudioSampleRates.SAMPLERATE_11000,
stt.AudioSampleRates.SAMPLERATE_16000,
stt.AudioSampleRates.SAMPLERATE_18900,
stt.AudioSampleRates.SAMPLERATE_22000,
stt.AudioSampleRates.SAMPLERATE_32000,
stt.AudioSampleRates.SAMPLERATE_37800,
stt.AudioSampleRates.SAMPLERATE_44100,
stt.AudioSampleRates.SAMPLERATE_48000,
]
@property
def supported_channels(self) -> list[stt.AudioChannels]:
"""Return a list of supported channels."""
return [stt.AudioChannels.CHANNEL_MONO, stt.AudioChannels.CHANNEL_STEREO]
async def async_process_audio_stream(
self, metadata: stt.SpeechMetadata, stream: AsyncIterable[bytes]
) -> stt.SpeechResult:
"""Process an audio stream to STT service."""
audio_bytes = bytearray()
async for chunk in stream:
audio_bytes.extend(chunk)
audio_data = bytes(audio_bytes)
if metadata.format == stt.AudioFormats.WAV:
# Add missing wav header
wav_buffer = io.BytesIO()
with wave.open(wav_buffer, "wb") as wf:
wf.setnchannels(metadata.channel.value)
wf.setsampwidth(metadata.bit_rate.value // 8)
wf.setframerate(metadata.sample_rate.value)
wf.writeframes(audio_data)
audio_data = wav_buffer.getvalue()
options = self.subentry.data
client = self.entry.runtime_data
try:
response = await client.audio.transcriptions.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_STT_MODEL),
file=(f"a.{metadata.format.value}", audio_data),
response_format="json",
language=metadata.language.split("-")[0],
prompt=options.get(CONF_PROMPT, DEFAULT_STT_PROMPT),
)
except OpenAIError:
_LOGGER.exception("Error during STT")
else:
if response.text:
return stt.SpeechResult(
response.text,
stt.SpeechResultState.SUCCESS,
)
return stt.SpeechResult(None, stt.SpeechResultState.ERROR)

View File

@@ -15,14 +15,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import CONTAINER_STATE_RUNNING, STACK_STATUS_ACTIVE
from .const import CONTAINER_STATE_RUNNING
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
PortainerEndpointEntity,
PortainerStackData,
PortainerStackEntity,
)
PARALLEL_UPDATES = 1
@@ -42,13 +40,6 @@ class PortainerEndpointBinarySensorEntityDescription(BinarySensorEntityDescripti
state_fn: Callable[[PortainerCoordinatorData], bool | None]
@dataclass(frozen=True, kw_only=True)
class PortainerStackBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer stack binary sensor description."""
state_fn: Callable[[PortainerStackData], bool | None]
CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] = (
PortainerContainerBinarySensorEntityDescription(
key="status",
@@ -69,18 +60,6 @@ ENDPOINT_SENSORS: tuple[PortainerEndpointBinarySensorEntityDescription, ...] = (
),
)
STACK_SENSORS: tuple[PortainerStackBinarySensorEntityDescription, ...] = (
PortainerStackBinarySensorEntityDescription(
key="stack_status",
translation_key="status",
state_fn=lambda data: (
data.stack.status == STACK_STATUS_ACTIVE
), # 1 = Active | 2 = Inactive
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -119,24 +98,9 @@ async def async_setup_entry(
if entity_description.state_fn(container)
)
def _async_add_new_stacks(
stacks: list[tuple[PortainerCoordinatorData, PortainerStackData]],
) -> None:
"""Add new stack sensors."""
async_add_entities(
PortainerStackSensor(
coordinator,
entity_description,
stack,
endpoint,
)
for (endpoint, stack) in stacks
for entity_description in STACK_SENSORS
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
coordinator.new_containers_callbacks.append(_async_add_new_containers)
coordinator.new_stacks_callbacks.append(_async_add_new_stacks)
_async_add_new_endpoints(
[
endpoint
@@ -151,13 +115,6 @@ async def async_setup_entry(
for container in endpoint.containers.values()
]
)
_async_add_new_stacks(
[
(endpoint, stack)
for endpoint in coordinator.data.values()
for stack in endpoint.stacks.values()
]
)
class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
@@ -205,27 +162,3 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(self.container_data)
class PortainerStackSensor(PortainerStackEntity, BinarySensorEntity):
"""Representation of a Portainer stack sensor."""
entity_description: PortainerStackBinarySensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackBinarySensorEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(self.stack_data)

View File

@@ -7,11 +7,3 @@ DEFAULT_NAME = "Portainer"
ENDPOINT_STATUS_DOWN = 2
CONTAINER_STATE_RUNNING = "running"
STACK_STATUS_ACTIVE = 1
STACK_STATUS_INACTIVE = 2
STACK_TYPE_SWARM = 1
STACK_TYPE_COMPOSE = 2
STACK_TYPE_KUBERNETES = 3

View File

@@ -21,7 +21,6 @@ from pyportainer.models.docker import (
)
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
from pyportainer.models.stacks import Stack
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
@@ -49,7 +48,6 @@ class PortainerCoordinatorData:
docker_version: DockerVersion
docker_info: DockerInfo
docker_system_df: DockerSystemDF
stacks: dict[str, PortainerStackData]
@dataclass(slots=True)
@@ -59,15 +57,6 @@ class PortainerContainerData:
container: DockerContainer
stats: DockerContainerStats | None
stats_pre: DockerContainerStats | None
stack: Stack | None
@dataclass(slots=True)
class PortainerStackData:
"""Stack data held by the Portainer coordinator."""
stack: Stack
container_count: int = 0
class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorData]]):
@@ -93,7 +82,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
self.known_endpoints: set[int] = set()
self.known_containers: set[tuple[int, str]] = set()
self.known_stacks: set[tuple[int, str]] = set()
self.new_endpoints_callbacks: list[
Callable[[list[PortainerCoordinatorData]], None]
@@ -103,9 +91,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
[list[tuple[PortainerCoordinatorData, PortainerContainerData]]], None
]
] = []
self.new_stacks_callbacks: list[
Callable[[list[tuple[PortainerCoordinatorData, PortainerStackData]]], None]
] = []
async def _async_setup(self) -> None:
"""Set up the Portainer Data Update Coordinator."""
@@ -168,47 +153,28 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
docker_version,
docker_info,
docker_system_df,
stacks,
) = await asyncio.gather(
self.portainer.get_containers(endpoint_id=endpoint.id),
self.portainer.docker_version(endpoint_id=endpoint.id),
self.portainer.docker_info(endpoint_id=endpoint.id),
self.portainer.get_containers(endpoint.id),
self.portainer.docker_version(endpoint.id),
self.portainer.docker_info(endpoint.id),
self.portainer.docker_system_df(endpoint.id),
self.portainer.get_stacks(endpoint_id=endpoint.id),
)
prev_endpoint = self.data.get(endpoint.id) if self.data else None
container_map: dict[str, PortainerContainerData] = {}
stack_map: dict[str, PortainerStackData] = {
stack.name: PortainerStackData(stack=stack, container_count=0)
for stack in stacks
}
# Map containers, started and stopped
for container in containers:
container_name = self._get_container_name(container.names[0])
prev_container = (
prev_endpoint.containers.get(container_name)
prev_endpoint.containers[container_name]
if prev_endpoint
else None
)
# Check if container belongs to a stack via docker compose label
stack_name: str | None = (
container.labels.get("com.docker.compose.project")
if container.labels
else None
)
if stack_name and (stack_data := stack_map.get(stack_name)):
stack_data.container_count += 1
container_map[container_name] = PortainerContainerData(
container=container,
stats=None,
stats_pre=prev_container.stats if prev_container else None,
stack=stack_map[stack_name].stack
if stack_name and stack_name in stack_map
else None,
)
# Separately fetch stats for running containers
@@ -263,7 +229,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
docker_version=docker_version,
docker_info=docker_info,
docker_system_df=docker_system_df,
stacks=stack_map,
)
self._async_add_remove_endpoints(mapped_endpoints)
@@ -291,17 +256,6 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
_LOGGER.debug("New containers found: %s", new_containers)
self.known_containers.update(new_containers)
# Stack management
current_stacks = {
(endpoint.id, stack_name)
for endpoint in mapped_endpoints.values()
for stack_name in endpoint.stacks
}
new_stacks = current_stacks - self.known_stacks
if new_stacks:
_LOGGER.debug("New stacks found: %s", new_stacks)
self.known_stacks.update(new_stacks)
def _get_container_name(self, container_name: str) -> str:
"""Sanitize to get a proper container name."""
return container_name.replace("/", " ").strip()

View File

@@ -11,7 +11,6 @@ from .coordinator import (
PortainerContainerData,
PortainerCoordinator,
PortainerCoordinatorData,
PortainerStackData,
)
@@ -87,13 +86,9 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
),
model="Container",
name=self.device_name,
# If the container belongs to a stack, nest it under the stack
# else it's the endpoint
via_device=(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{device_info.stack.name}"
if device_info.stack
else f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
f"{self.coordinator.config_entry.entry_id}_{self.endpoint_id}",
),
translation_key=None if self.device_name else "unknown_container",
entry_type=DeviceEntryType.SERVICE,
@@ -112,54 +107,3 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
def container_data(self) -> PortainerContainerData:
"""Return the coordinator data for this container."""
return self.coordinator.data[self.endpoint_id].containers[self.device_name]
class PortainerStackEntity(PortainerCoordinatorEntity):
"""Base implementation for Portainer stack."""
def __init__(
self,
device_info: PortainerStackData,
coordinator: PortainerCoordinator,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize a Portainer stack."""
super().__init__(coordinator)
self._device_info = device_info
self.stack_id = device_info.stack.id
self.device_name = device_info.stack.name
self.endpoint_id = via_device.endpoint.id
self.endpoint_name = via_device.endpoint.name
self._attr_device_info = DeviceInfo(
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{self.device_name}",
)
},
manufacturer=DEFAULT_NAME,
configuration_url=URL(
f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/stacks/{self.device_name}"
),
model="Stack",
name=self.device_name,
via_device=(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.endpoint_id}",
),
)
@property
def available(self) -> bool:
"""Return if the stack is available."""
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].stacks
)
@property
def stack_data(self) -> PortainerStackData:
"""Return the coordinator data for this stack."""
return self.coordinator.data[self.endpoint_id].stacks[self.device_name]

View File

@@ -70,12 +70,6 @@
"operating_system_version": {
"default": "mdi:alpha-v-box"
},
"stack_containers_count": {
"default": "mdi:server"
},
"stack_type": {
"default": "mdi:server"
},
"volume_disk_usage_total_size": {
"default": "mdi:harddisk"
}
@@ -86,12 +80,6 @@
"state": {
"on": "mdi:arrow-up-box"
}
},
"stack": {
"default": "mdi:arrow-down-box",
"state": {
"on": "mdi:arrow-up-box"
}
}
}
},

View File

@@ -17,18 +17,15 @@ from homeassistant.const import PERCENTAGE, UnitOfInformation
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import STACK_TYPE_COMPOSE, STACK_TYPE_KUBERNETES, STACK_TYPE_SWARM
from .coordinator import (
PortainerConfigEntry,
PortainerContainerData,
PortainerCoordinator,
PortainerStackData,
)
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
PortainerEndpointEntity,
PortainerStackEntity,
)
PARALLEL_UPDATES = 1
@@ -48,13 +45,6 @@ class PortainerEndpointSensorEntityDescription(SensorEntityDescription):
value_fn: Callable[[PortainerCoordinatorData], StateType]
@dataclass(frozen=True, kw_only=True)
class PortainerStackSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer stack sensor description."""
value_fn: Callable[[PortainerStackData], StateType]
CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="image",
@@ -288,32 +278,6 @@ ENDPOINT_SENSORS: tuple[PortainerEndpointSensorEntityDescription, ...] = (
),
)
STACK_SENSORS: tuple[PortainerStackSensorEntityDescription, ...] = (
PortainerStackSensorEntityDescription(
key="stack_type",
translation_key="stack_type",
value_fn=lambda data: (
"swarm"
if data.stack.type == STACK_TYPE_SWARM
else "compose"
if data.stack.type == STACK_TYPE_COMPOSE
else "kubernetes"
if data.stack.type == STACK_TYPE_KUBERNETES
else None
),
device_class=SensorDeviceClass.ENUM,
options=["swarm", "compose", "kubernetes"],
entity_category=EntityCategory.DIAGNOSTIC,
),
PortainerStackSensorEntityDescription(
key="stack_containers_count",
translation_key="stack_containers_count",
value_fn=lambda data: data.container_count,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -351,24 +315,8 @@ async def async_setup_entry(
for entity_description in CONTAINER_SENSORS
)
def _async_add_new_stacks(
stacks: list[tuple[PortainerCoordinatorData, PortainerStackData]],
) -> None:
"""Add new stack sensors."""
async_add_entities(
PortainerStackSensor(
coordinator,
entity_description,
stack,
endpoint,
)
for (endpoint, stack) in stacks
for entity_description in STACK_SENSORS
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
coordinator.new_containers_callbacks.append(_async_add_new_containers)
coordinator.new_stacks_callbacks.append(_async_add_new_stacks)
_async_add_new_endpoints(
[
@@ -384,13 +332,6 @@ async def async_setup_entry(
for container in endpoint.containers.values()
]
)
_async_add_new_stacks(
[
(endpoint, stack)
for endpoint in coordinator.data.values()
for stack in endpoint.stacks.values()
]
)
class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
@@ -439,27 +380,3 @@ class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
"""Return the state of the sensor."""
endpoint_data = self.coordinator.data[self._device_info.endpoint.id]
return self.entity_description.value_fn(endpoint_data)
class PortainerStackSensor(PortainerStackEntity, SensorEntity):
"""Representation of a Portainer stack sensor."""
entity_description: PortainerStackSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackSensorEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.stack_data)

View File

@@ -147,18 +147,6 @@
"operating_system_version": {
"name": "Operating system version"
},
"stack_containers_count": {
"name": "Containers",
"unit_of_measurement": "containers"
},
"stack_type": {
"name": "Type",
"state": {
"compose": "Compose",
"kubernetes": "Kubernetes",
"swarm": "Swarm"
}
},
"volume_disk_usage_total_size": {
"name": "Volume disk usage total size"
}
@@ -166,9 +154,6 @@
"switch": {
"container": {
"name": "Container"
},
"stack": {
"name": "Stack"
}
}
},

View File

@@ -23,17 +23,9 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import DOMAIN, STACK_STATUS_ACTIVE
from .coordinator import (
PortainerContainerData,
PortainerCoordinator,
PortainerStackData,
)
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
PortainerStackEntity,
)
from .const import DOMAIN
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData
@dataclass(frozen=True, kw_only=True)
@@ -45,19 +37,10 @@ class PortainerSwitchEntityDescription(SwitchEntityDescription):
turn_off_fn: Callable[[str, Portainer, int, str], Coroutine[Any, Any, None]]
@dataclass(frozen=True, kw_only=True)
class PortainerStackSwitchEntityDescription(SwitchEntityDescription):
"""Class to hold Portainer stack switch description."""
is_on_fn: Callable[[PortainerStackData], bool | None]
turn_on_fn: Callable[[str, Portainer, int, int], Coroutine[Any, Any, None]]
turn_off_fn: Callable[[str, Portainer, int, int], Coroutine[Any, Any, None]]
PARALLEL_UPDATES = 1
async def perform_container_action(
async def perform_action(
action: str, portainer: Portainer, endpoint_id: int, container_id: str
) -> None:
"""Perform an action on a container."""
@@ -87,52 +70,14 @@ async def perform_container_action(
) from err
async def perform_stack_action(
action: str, portainer: Portainer, endpoint_id: int, stack_id: int
) -> None:
"""Perform an action on a stack."""
try:
match action:
case "start":
await portainer.start_stack(stack_id, endpoint_id)
case "stop":
await portainer.stop_stack(stack_id, endpoint_id)
except PortainerAuthenticationError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="invalid_auth_no_details",
) from err
except PortainerConnectionError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="cannot_connect_no_details",
) from err
except PortainerTimeoutError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="timeout_connect_no_details",
) from err
CONTAINER_SWITCHES: tuple[PortainerSwitchEntityDescription, ...] = (
SWITCHES: tuple[PortainerSwitchEntityDescription, ...] = (
PortainerSwitchEntityDescription(
key="container",
translation_key="container",
device_class=SwitchDeviceClass.SWITCH,
is_on_fn=lambda data: data.container.state == "running",
turn_on_fn=perform_container_action,
turn_off_fn=perform_container_action,
),
)
STACK_SWITCHES: tuple[PortainerStackSwitchEntityDescription, ...] = (
PortainerStackSwitchEntityDescription(
key="stack",
translation_key="stack",
device_class=SwitchDeviceClass.SWITCH,
is_on_fn=lambda data: data.stack.status == STACK_STATUS_ACTIVE,
turn_on_fn=perform_stack_action,
turn_off_fn=perform_stack_action,
turn_on_fn=perform_action,
turn_off_fn=perform_action,
),
)
@@ -157,26 +102,10 @@ async def async_setup_entry(
endpoint,
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SWITCHES
)
def _async_add_new_stacks(
stacks: list[tuple[PortainerCoordinatorData, PortainerStackData]],
) -> None:
"""Add new stack switch sensors."""
async_add_entities(
PortainerStackSwitch(
coordinator,
entity_description,
stack,
endpoint,
)
for (endpoint, stack) in stacks
for entity_description in STACK_SWITCHES
for entity_description in SWITCHES
)
coordinator.new_containers_callbacks.append(_async_add_new_containers)
coordinator.new_stacks_callbacks.append(_async_add_new_stacks)
_async_add_new_containers(
[
(endpoint, container)
@@ -184,13 +113,6 @@ async def async_setup_entry(
for container in endpoint.containers.values()
]
)
_async_add_new_stacks(
[
(endpoint, stack)
for endpoint in coordinator.data.values()
for stack in endpoint.stacks.values()
]
)
class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
@@ -235,47 +157,3 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
self.container_data.container.id,
)
await self.coordinator.async_request_refresh()
class PortainerStackSwitch(PortainerStackEntity, SwitchEntity):
"""Representation of a Portainer stack switch."""
entity_description: PortainerStackSwitchEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerStackSwitchEntityDescription,
device_info: PortainerStackData,
via_device: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer stack switch."""
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.stack.id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return the state of the device."""
return self.entity_description.is_on_fn(self.stack_data)
async def async_turn_on(self, **kwargs: Any) -> None:
"""Start (turn on) the stack."""
await self.entity_description.turn_on_fn(
"start",
self.coordinator.portainer,
self.endpoint_id,
self.stack_data.stack.id,
)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Stop (turn off) the stack."""
await self.entity_description.turn_off_fn(
"stop",
self.coordinator.portainer,
self.endpoint_id,
self.stack_data.stack.id,
)
await self.coordinator.async_request_refresh()

View File

@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["powerfox==2.1.1"],
"requirements": ["powerfox==2.1.0"],
"zeroconf": [
{
"name": "powerfox*",

View File

@@ -2,18 +2,12 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from powerfox import PowerfoxAuthenticationError, PowerfoxConnectionError, PowerfoxLocal
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_HOST
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
@@ -27,12 +21,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
}
)
class PowerfoxLocalConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Powerfox Local."""
@@ -45,7 +33,7 @@ class PowerfoxLocalConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the user step."""
errors = {}
errors: dict[str, str] = {}
if user_input is not None:
self._host = user_input[CONF_HOST]
@@ -59,15 +47,7 @@ class PowerfoxLocalConfigFlow(ConfigFlow, domain=DOMAIN):
except PowerfoxConnectionError:
errors["base"] = "cannot_connect"
else:
if self.source == SOURCE_USER:
return self._async_create_entry()
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data={
CONF_HOST: self._host,
CONF_API_KEY: self._api_key,
},
)
return self._async_create_entry()
return self.async_show_form(
step_id="user",
@@ -104,51 +84,6 @@ class PowerfoxLocalConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a confirmation flow for zeroconf discovery."""
return self._async_create_entry()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle re-authentication flow."""
self._host = entry_data[CONF_HOST]
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle re-authentication confirmation."""
errors = {}
if user_input is not None:
self._api_key = user_input[CONF_API_KEY]
reauth_entry = self._get_reauth_entry()
client = PowerfoxLocal(
host=reauth_entry.data[CONF_HOST],
api_key=user_input[CONF_API_KEY],
session=async_get_clientsession(self.hass),
)
try:
await client.value()
except PowerfoxAuthenticationError:
errors["base"] = "invalid_auth"
except PowerfoxConnectionError:
errors["base"] = "cannot_connect"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates=user_input,
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_REAUTH_DATA_SCHEMA,
errors=errors,
)
async def async_step_reconfigure(
self, user_input: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reconfiguration."""
return await self.async_step_user()
def _async_create_entry(self) -> ConfigFlowResult:
"""Create a config entry."""
return self.async_create_entry(
@@ -168,8 +103,5 @@ class PowerfoxLocalConfigFlow(ConfigFlow, domain=DOMAIN):
)
await client.value()
await self.async_set_unique_id(self._device_id, raise_on_progress=False)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
await self.async_set_unique_id(self._device_id)
self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})

View File

@@ -2,17 +2,11 @@
from __future__ import annotations
from powerfox import (
LocalResponse,
PowerfoxAuthenticationError,
PowerfoxConnectionError,
PowerfoxLocal,
)
from powerfox import LocalResponse, PowerfoxConnectionError, PowerfoxLocal
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -46,12 +40,6 @@ class PowerfoxLocalDataUpdateCoordinator(DataUpdateCoordinator[LocalResponse]):
"""Fetch data from the local poweropti."""
try:
return await self.client.value()
except PowerfoxAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": str(err)},
) from err
except PowerfoxConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,

View File

@@ -1,24 +0,0 @@
"""Support for Powerfox Local diagnostics."""
from __future__ import annotations
from typing import Any
from homeassistant.core import HomeAssistant
from .coordinator import PowerfoxLocalConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: PowerfoxLocalConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for Powerfox Local config entry."""
coordinator = entry.runtime_data
return {
"power": coordinator.data.power,
"energy_usage": coordinator.data.energy_usage,
"energy_usage_high_tariff": coordinator.data.energy_usage_high_tariff,
"energy_usage_low_tariff": coordinator.data.energy_usage_low_tariff,
"energy_return": coordinator.data.energy_return,
}

View File

@@ -6,8 +6,8 @@
"documentation": "https://www.home-assistant.io/integrations/powerfox_local",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["powerfox==2.1.1"],
"quality_scale": "bronze",
"requirements": ["powerfox==2.1.0"],
"zeroconf": [
{
"name": "powerfox*",

View File

@@ -43,12 +43,12 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: done
discovery: done
docs-data-update: done
@@ -74,7 +74,7 @@ rules:
status: exempt
comment: |
There is no need for icon translations.
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |

View File

@@ -2,26 +2,13 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "Please ensure you reconfigure against the same device."
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"step": {
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "[%key:component::powerfox_local::config::step::user::data_description::api_key%]"
},
"description": "The API key for your Poweropti device is no longer valid.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
@@ -56,9 +43,6 @@
}
},
"exceptions": {
"invalid_auth": {
"message": "Error while authenticating with the device: {error}"
},
"update_failed": {
"message": "Error while updating the device: {error}"
}

View File

@@ -40,7 +40,6 @@ from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.SENSOR,
]

View File

@@ -74,20 +74,16 @@ def _get_nodes_data(data: dict[str, Any]) -> list[dict[str, Any]]:
raise ProxmoxSSLError from err
except ConnectTimeout as err:
raise ProxmoxConnectTimeout from err
except ResourceException as err:
except (ResourceException, requests.exceptions.ConnectionError) as err:
raise ProxmoxNoNodesFound from err
except requests.exceptions.ConnectionError as err:
raise ProxmoxConnectionError from err
nodes_data: list[dict[str, Any]] = []
for node in nodes:
try:
vms = client.nodes(node["node"]).qemu.get()
containers = client.nodes(node["node"]).lxc.get()
except ResourceException as err:
except (ResourceException, requests.exceptions.ConnectionError) as err:
raise ProxmoxNoNodesFound from err
except requests.exceptions.ConnectionError as err:
raise ProxmoxConnectionError from err
nodes_data.append(
{
@@ -201,30 +197,18 @@ class ProxmoxveConfigFlow(ConfigFlow, domain=DOMAIN):
"""Validate the user input. Return nodes data and/or errors."""
errors: dict[str, str] = {}
proxmox_nodes: list[dict[str, Any]] = []
err: ProxmoxError | None = None
try:
proxmox_nodes = await self.hass.async_add_executor_job(
_get_nodes_data, user_input
)
except ProxmoxConnectTimeout as exc:
except ProxmoxConnectTimeout:
errors["base"] = "connect_timeout"
err = exc
except ProxmoxAuthenticationError as exc:
except ProxmoxAuthenticationError:
errors["base"] = "invalid_auth"
err = exc
except ProxmoxSSLError as exc:
except ProxmoxSSLError:
errors["base"] = "ssl_error"
err = exc
except ProxmoxNoNodesFound as exc:
except ProxmoxNoNodesFound:
errors["base"] = "no_nodes_found"
err = exc
except ProxmoxConnectionError as exc:
errors["base"] = "cannot_connect"
err = exc
if err is not None:
_LOGGER.debug("Error: %s: %s", errors["base"], err)
return proxmox_nodes, errors
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
@@ -243,8 +227,6 @@ class ProxmoxveConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="ssl_error")
except ProxmoxNoNodesFound:
return self.async_abort(reason="no_nodes_found")
except ProxmoxConnectionError:
return self.async_abort(reason="cannot_connect")
return self.async_create_entry(
title=import_data[CONF_HOST],
@@ -252,25 +234,17 @@ class ProxmoxveConfigFlow(ConfigFlow, domain=DOMAIN):
)
class ProxmoxError(HomeAssistantError):
"""Base class for Proxmox VE errors."""
class ProxmoxNoNodesFound(ProxmoxError):
class ProxmoxNoNodesFound(HomeAssistantError):
"""Error to indicate no nodes found."""
class ProxmoxConnectTimeout(ProxmoxError):
class ProxmoxConnectTimeout(HomeAssistantError):
"""Error to indicate a connection timeout."""
class ProxmoxSSLError(ProxmoxError):
class ProxmoxSSLError(HomeAssistantError):
"""Error to indicate an SSL error."""
class ProxmoxAuthenticationError(ProxmoxError):
class ProxmoxAuthenticationError(HomeAssistantError):
"""Error to indicate an authentication error."""
class ProxmoxConnectionError(ProxmoxError):
"""Error to indicate a connection error."""

View File

@@ -101,18 +101,12 @@ class ProxmoxCoordinator(DataUpdateCoordinator[dict[str, ProxmoxNodeData]]):
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
except ResourceException as err:
except (ResourceException, requests.exceptions.ConnectionError) as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="no_nodes_found",
translation_placeholders={"error": repr(err)},
) from err
except requests.exceptions.ConnectionError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> dict[str, ProxmoxNodeData]:
"""Fetch data from Proxmox VE API."""
@@ -139,18 +133,12 @@ class ProxmoxCoordinator(DataUpdateCoordinator[dict[str, ProxmoxNodeData]]):
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
except ResourceException as err:
except (ResourceException, requests.exceptions.ConnectionError) as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="no_nodes_found",
translation_placeholders={"error": repr(err)},
) from err
except requests.exceptions.ConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
data: dict[str, ProxmoxNodeData] = {}
for node, (vms, containers) in zip(nodes, vms_containers, strict=True):

View File

@@ -13,71 +13,6 @@
"stop": {
"default": "mdi:stop"
}
},
"sensor": {
"container_cpu": {
"default": "mdi:cpu-64-bit"
},
"container_disk": {
"default": "mdi:harddisk"
},
"container_max_cpu": {
"default": "mdi:cpu-64-bit"
},
"container_max_disk": {
"default": "mdi:harddisk"
},
"container_max_memory": {
"default": "mdi:memory"
},
"container_memory": {
"default": "mdi:memory"
},
"container_status": {
"default": "mdi:server"
},
"node_cpu": {
"default": "mdi:cpu-64-bit"
},
"node_disk": {
"default": "mdi:harddisk"
},
"node_max_cpu": {
"default": "mdi:cpu-64-bit"
},
"node_max_disk": {
"default": "mdi:harddisk"
},
"node_max_memory": {
"default": "mdi:memory"
},
"node_memory": {
"default": "mdi:memory"
},
"node_status": {
"default": "mdi:server"
},
"vm_cpu": {
"default": "mdi:cpu-64-bit"
},
"vm_disk": {
"default": "mdi:harddisk"
},
"vm_max_cpu": {
"default": "mdi:cpu-64-bit"
},
"vm_max_disk": {
"default": "mdi:harddisk"
},
"vm_max_memory": {
"default": "mdi:memory"
},
"vm_memory": {
"default": "mdi:memory"
},
"vm_status": {
"default": "mdi:server"
}
}
}
}

View File

@@ -1,386 +0,0 @@
"""Sensor platform for Proxmox VE integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from homeassistant.components.sensor import (
EntityCategory,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
StateType,
)
from homeassistant.const import PERCENTAGE, UnitOfInformation
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData
from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity
@dataclass(frozen=True, kw_only=True)
class ProxmoxNodeSensorEntityDescription(SensorEntityDescription):
"""Class to hold Proxmox node sensor description."""
value_fn: Callable[[ProxmoxNodeData], StateType]
@dataclass(frozen=True, kw_only=True)
class ProxmoxVMSensorEntityDescription(SensorEntityDescription):
"""Class to hold Proxmox VM sensor description."""
value_fn: Callable[[dict[str, Any]], StateType]
@dataclass(frozen=True, kw_only=True)
class ProxmoxContainerSensorEntityDescription(SensorEntityDescription):
"""Class to hold Proxmox container sensor description."""
value_fn: Callable[[dict[str, Any]], StateType]
NODE_SENSORS: tuple[ProxmoxNodeSensorEntityDescription, ...] = (
ProxmoxNodeSensorEntityDescription(
key="node_cpu",
translation_key="node_cpu",
value_fn=lambda data: data.node["cpu"] * 100,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxNodeSensorEntityDescription(
key="node_max_cpu",
translation_key="node_max_cpu",
value_fn=lambda data: data.node["maxcpu"],
),
ProxmoxNodeSensorEntityDescription(
key="node_disk",
translation_key="node_disk",
value_fn=lambda data: data.node["disk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxNodeSensorEntityDescription(
key="node_max_disk",
translation_key="node_max_disk",
value_fn=lambda data: data.node["maxdisk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxNodeSensorEntityDescription(
key="node_memory",
translation_key="node_memory",
value_fn=lambda data: data.node["mem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxNodeSensorEntityDescription(
key="node_max_memory",
translation_key="node_max_memory",
value_fn=lambda data: data.node["maxmem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxNodeSensorEntityDescription(
key="node_status",
translation_key="node_status",
value_fn=lambda data: data.node["status"],
device_class=SensorDeviceClass.ENUM,
options=["online", "offline"],
),
)
VM_SENSORS: tuple[ProxmoxVMSensorEntityDescription, ...] = (
ProxmoxVMSensorEntityDescription(
key="vm_max_cpu",
translation_key="vm_max_cpu",
value_fn=lambda data: data["cpus"],
),
ProxmoxVMSensorEntityDescription(
key="vm_cpu",
translation_key="vm_cpu",
value_fn=lambda data: data["cpu"] * 100,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxVMSensorEntityDescription(
key="vm_memory",
translation_key="vm_memory",
value_fn=lambda data: data["mem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxVMSensorEntityDescription(
key="vm_max_memory",
translation_key="vm_max_memory",
value_fn=lambda data: data["maxmem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxVMSensorEntityDescription(
key="vm_disk",
translation_key="vm_disk",
value_fn=lambda data: data["disk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxVMSensorEntityDescription(
key="vm_max_disk",
translation_key="vm_max_disk",
value_fn=lambda data: data["maxdisk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxVMSensorEntityDescription(
key="vm_status",
translation_key="vm_status",
value_fn=lambda data: data["status"],
device_class=SensorDeviceClass.ENUM,
options=["running", "stopped", "suspended"],
),
)
CONTAINER_SENSORS: tuple[ProxmoxContainerSensorEntityDescription, ...] = (
ProxmoxContainerSensorEntityDescription(
key="container_max_cpu",
translation_key="container_max_cpu",
value_fn=lambda data: data["cpus"],
),
ProxmoxContainerSensorEntityDescription(
key="container_cpu",
translation_key="container_cpu",
value_fn=lambda data: data["cpu"] * 100,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
suggested_display_precision=2,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxContainerSensorEntityDescription(
key="container_memory",
translation_key="container_memory",
value_fn=lambda data: data["mem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxContainerSensorEntityDescription(
key="container_max_memory",
translation_key="container_max_memory",
value_fn=lambda data: data["maxmem"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxContainerSensorEntityDescription(
key="container_disk",
translation_key="container_disk",
value_fn=lambda data: data["disk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxContainerSensorEntityDescription(
key="container_max_disk",
translation_key="container_max_disk",
value_fn=lambda data: data["maxdisk"],
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=1,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
ProxmoxContainerSensorEntityDescription(
key="container_status",
translation_key="container_status",
value_fn=lambda data: data["status"],
device_class=SensorDeviceClass.ENUM,
options=["running", "stopped", "suspended"],
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: ProxmoxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Proxmox VE sensors."""
coordinator = entry.runtime_data
def _async_add_new_nodes(nodes: list[ProxmoxNodeData]) -> None:
"""Add new node sensors."""
async_add_entities(
ProxmoxNodeSensor(coordinator, entity_description, node)
for node in nodes
for entity_description in NODE_SENSORS
)
def _async_add_new_vms(
vms: list[tuple[ProxmoxNodeData, dict[str, Any]]],
) -> None:
"""Add new VM sensors."""
async_add_entities(
ProxmoxVMSensor(coordinator, entity_description, vm, node_data)
for (node_data, vm) in vms
for entity_description in VM_SENSORS
)
def _async_add_new_containers(
containers: list[tuple[ProxmoxNodeData, dict[str, Any]]],
) -> None:
"""Add new container sensors."""
async_add_entities(
ProxmoxContainerSensor(
coordinator, entity_description, container, node_data
)
for (node_data, container) in containers
for entity_description in CONTAINER_SENSORS
)
coordinator.new_nodes_callbacks.append(_async_add_new_nodes)
coordinator.new_vms_callbacks.append(_async_add_new_vms)
coordinator.new_containers_callbacks.append(_async_add_new_containers)
_async_add_new_nodes(
[
node_data
for node_data in coordinator.data.values()
if node_data.node["node"] in coordinator.known_nodes
]
)
_async_add_new_vms(
[
(node_data, vm_data)
for node_data in coordinator.data.values()
for vmid, vm_data in node_data.vms.items()
if (node_data.node["node"], vmid) in coordinator.known_vms
]
)
_async_add_new_containers(
[
(node_data, container_data)
for node_data in coordinator.data.values()
for vmid, container_data in node_data.containers.items()
if (node_data.node["node"], vmid) in coordinator.known_containers
]
)
class ProxmoxNodeSensor(ProxmoxNodeEntity, SensorEntity):
"""Representation of a Proxmox VE node sensor."""
entity_description: ProxmoxNodeSensorEntityDescription
def __init__(
self,
coordinator: ProxmoxCoordinator,
entity_description: ProxmoxNodeSensorEntityDescription,
node_data: ProxmoxNodeData,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, node_data)
self.entity_description = entity_description
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the native value of the sensor."""
return self.entity_description.value_fn(self.coordinator.data[self.device_name])
class ProxmoxVMSensor(ProxmoxVMEntity, SensorEntity):
"""Represents a Proxmox VE VM sensor."""
entity_description: ProxmoxVMSensorEntityDescription
def __init__(
self,
coordinator: ProxmoxCoordinator,
entity_description: ProxmoxVMSensorEntityDescription,
vm_data: dict[str, Any],
node_data: ProxmoxNodeData,
) -> None:
"""Initialize the Proxmox VM sensor."""
self.entity_description = entity_description
super().__init__(coordinator, vm_data, node_data)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the native value of the sensor."""
return self.entity_description.value_fn(self.vm_data)
class ProxmoxContainerSensor(ProxmoxContainerEntity, SensorEntity):
"""Represents a Proxmox VE container sensor."""
entity_description: ProxmoxContainerSensorEntityDescription
def __init__(
self,
coordinator: ProxmoxCoordinator,
entity_description: ProxmoxContainerSensorEntityDescription,
container_data: dict[str, Any],
node_data: ProxmoxNodeData,
) -> None:
"""Initialize the Proxmox container sensor."""
self.entity_description = entity_description
super().__init__(coordinator, container_data, node_data)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}"
@property
def native_value(self) -> StateType:
"""Return the native value of the sensor."""
return self.entity_description.value_fn(self.container_data)

View File

@@ -77,85 +77,6 @@
"stop_all": {
"name": "Stop all"
}
},
"sensor": {
"container_cpu": {
"name": "CPU usage"
},
"container_disk": {
"name": "Disk usage"
},
"container_max_cpu": {
"name": "Max CPU"
},
"container_max_disk": {
"name": "Max disk usage"
},
"container_max_memory": {
"name": "Max memory usage"
},
"container_memory": {
"name": "Memory usage"
},
"container_status": {
"name": "Status",
"state": {
"running": "Running",
"stopped": "Stopped",
"suspended": "Suspended"
}
},
"node_cpu": {
"name": "CPU usage"
},
"node_disk": {
"name": "Disk usage"
},
"node_max_cpu": {
"name": "Max CPU"
},
"node_max_disk": {
"name": "Max disk usage"
},
"node_max_memory": {
"name": "Max memory usage"
},
"node_memory": {
"name": "Memory usage"
},
"node_status": {
"name": "Status",
"state": {
"offline": "Offline",
"online": "Online"
}
},
"vm_cpu": {
"name": "CPU usage"
},
"vm_disk": {
"name": "Disk usage"
},
"vm_max_cpu": {
"name": "Max CPU"
},
"vm_max_disk": {
"name": "Max disk usage"
},
"vm_max_memory": {
"name": "Max memory usage"
},
"vm_memory": {
"name": "Memory usage"
},
"vm_status": {
"name": "Status",
"state": {
"running": "Running",
"stopped": "Stopped",
"suspended": "Suspended"
}
}
}
},
"exceptions": {
@@ -188,10 +109,6 @@
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring {integration_title} via YAML is deprecated and will be removed in a future release. While importing your configuration, a connection error occurred. Please correct your YAML configuration and restart Home Assistant, or remove the {domain} key from your configuration and configure the integration via the UI.",
"title": "[%key:component::proxmoxve::issues::deprecated_yaml_import_issue_connect_timeout::title%]"
},
"deprecated_yaml_import_issue_connect_timeout": {
"description": "Configuring {integration_title} via YAML is deprecated and will be removed in a future release. While importing your configuration, a connection timeout occurred. Please correct your YAML configuration and restart Home Assistant, or remove the {domain} key from your configuration and configure the integration via the UI.",
"title": "The {integration_title} YAML configuration is being removed"

View File

@@ -6,7 +6,7 @@ import logging
from typing import Any
import aiohttp
from pyrainbird.async_client import AsyncRainbirdController, CreateController
from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController
from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException
from homeassistant.const import (
@@ -77,10 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) ->
clientsession = async_create_clientsession()
_async_register_clientsession_shutdown(hass, entry, clientsession)
controller = CreateController(
clientsession,
entry.data[CONF_HOST],
entry.data[CONF_PASSWORD],
controller = AsyncRainbirdController(
AsyncRainbirdClient(
clientsession,
entry.data[CONF_HOST],
entry.data[CONF_PASSWORD],
)
)
if not (await _async_fix_unique_id(hass, controller, entry)):

View File

@@ -7,7 +7,7 @@ from collections.abc import Mapping
import logging
from typing import Any
from pyrainbird.async_client import CreateController
from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController
from pyrainbird.data import WifiParams
from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException
import voluptuous as vol
@@ -137,7 +137,13 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN):
Raises a ConfigFlowError on failure.
"""
clientsession = async_create_clientsession()
controller = CreateController(clientsession, host, password)
controller = AsyncRainbirdController(
AsyncRainbirdClient(
clientsession,
host,
password,
)
)
try:
async with asyncio.timeout(TIMEOUT_SECONDS):
return await asyncio.gather(

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyrainbird"],
"requirements": ["pyrainbird==6.1.0"]
"requirements": ["pyrainbird==6.0.5"]
}

View File

@@ -482,6 +482,9 @@
"mqtt_unauthorized": {
"message": "Roborock MQTT servers rejected the connection due to rate limiting or invalid credentials. You may either attempt to reauthenticate or wait and reload the integration."
},
"multiple_maps_in_clean": {
"message": "All segments must belong to the same map. Got segments from maps: {map_flags}"
},
"no_coordinators": {
"message": "No devices were able to successfully setup"
},
@@ -491,6 +494,9 @@
"position_not_found": {
"message": "Robot position not found"
},
"segment_id_parse_error": {
"message": "Invalid segment ID format: {segment_id}"
},
"update_data_fail": {
"message": "Failed to update data"
},

View File

@@ -1,5 +1,6 @@
"""Support for Roborock vacuum class."""
import asyncio
import logging
from typing import Any
@@ -13,11 +14,11 @@ from homeassistant.components.vacuum import (
VacuumActivity,
VacuumEntityFeature,
)
from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.core import HomeAssistant, ServiceResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .const import DOMAIN, MAP_SLEEP
from .coordinator import (
RoborockB01Q7UpdateCoordinator,
RoborockConfigEntry,
@@ -120,26 +121,6 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
self._home_trait = coordinator.properties_api.home
self._maps_trait = coordinator.properties_api.maps
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator.
Creates a repair issue when the vacuum reports different segments than
what was available when the area mapping was last configured.
"""
super()._handle_coordinator_update()
last_seen = self.last_seen_segments
if last_seen is None:
# No area mapping has been configured yet; nothing to check.
return
current_ids = {
f"{map_flag}_{room.segment_id}"
for map_flag, map_info in (self._home_trait.home_map_info or {}).items()
for room in map_info.rooms
}
if current_ids != {seg.id for seg in last_seen}:
self.async_create_segments_issue()
@property
def fan_speed_list(self) -> list[str]:
"""Get the list of available fan speeds."""
@@ -211,7 +192,7 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
return []
return [
Segment(
id=f"{map_flag}_{room.segment_id}",
id=f"{map_flag}:{room.segment_id}",
name=room.name,
group=map_info.name,
)
@@ -223,21 +204,51 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
"""Clean the specified segments."""
parsed: list[tuple[int, int]] = []
for seg_id in segment_ids:
map_flag_str, room_id_str = seg_id.split("_", maxsplit=1)
parsed.append((int(map_flag_str), int(room_id_str)))
# Segment id is mapflag:segment_id
parts = seg_id.split(":")
if len(parts) != 2:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="segment_id_parse_error",
translation_placeholders={"segment_id": seg_id},
)
try:
# We need to make sure both parts are ints.
parsed.append((int(parts[0]), int(parts[1])))
except ValueError as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="segment_id_parse_error",
translation_placeholders={"segment_id": seg_id},
) from err
# Segments from other maps are silently ignored; only segments
# belonging to the currently active map are cleaned.
current_map = self._maps_trait.current_map
current_map_segments = [
seg_id for map_flag, seg_id in parsed if map_flag == current_map
]
if not current_map_segments:
return
# Because segment_ids can overlap for each map,
# we need to make sure that only one map is passed in.
unique_map_flags = {map_flag for map_flag, _ in parsed}
if len(unique_map_flags) > 1:
map_flags_str = ", ".join(str(flag) for flag in sorted(unique_map_flags))
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="multiple_maps_in_clean",
translation_placeholders={"map_flags": map_flags_str},
)
target_map_flag = next(iter(unique_map_flags))
if self._maps_trait.current_map != target_map_flag:
# If the user is attempting to clean an area on a map that is not selected, we should try to change.
try:
await self._maps_trait.set_current_map(target_map_flag)
except RoborockException as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={"command": "load_multi_map"},
) from err
await asyncio.sleep(MAP_SLEEP)
# We can now confirm all segments are on our current map, so clean them all.
await self.send(
RoborockCommand.APP_SEGMENT_CLEAN,
[{"segments": current_map_segments}],
[{"segments": [seg_id for _, seg_id in parsed]}],
)
async def async_send_command(

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
from typing import Any
from pysaunum import MAX_TEMPERATURE, MIN_TEMPERATURE, SaunumException
@@ -242,9 +241,9 @@ class LeilSaunaClimate(LeilSaunaEntity, ClimateEntity):
async def async_start_session(
self,
duration: timedelta = timedelta(minutes=120),
duration: int = 120,
target_temperature: int = 80,
fan_duration: timedelta = timedelta(minutes=10),
fan_duration: int = 10,
) -> None:
"""Start a sauna session with custom parameters."""
if self.coordinator.data.door_open:
@@ -255,15 +254,11 @@ class LeilSaunaClimate(LeilSaunaEntity, ClimateEntity):
try:
# Set all parameters before starting the session
await self.coordinator.client.async_set_sauna_duration(
int(duration.total_seconds() // 60)
)
await self.coordinator.client.async_set_sauna_duration(duration)
await self.coordinator.client.async_set_target_temperature(
target_temperature
)
await self.coordinator.client.async_set_fan_duration(
int(fan_duration.total_seconds() // 60)
)
await self.coordinator.client.async_set_fan_duration(fan_duration)
await self.coordinator.client.async_start_session()
except SaunumException as err:
raise HomeAssistantError(

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from datetime import timedelta
from pysaunum import MAX_DURATION, MAX_FAN_DURATION, MAX_TEMPERATURE, MIN_TEMPERATURE
import voluptuous as vol
@@ -29,22 +27,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_START_SESSION,
entity_domain=CLIMATE_DOMAIN,
schema={
vol.Optional(ATTR_DURATION, default=timedelta(minutes=120)): vol.All(
cv.time_period,
vol.Range(
min=timedelta(minutes=1),
max=timedelta(minutes=MAX_DURATION),
),
vol.Optional(ATTR_DURATION, default=120): vol.All(
cv.positive_int, vol.Range(min=1, max=MAX_DURATION)
),
vol.Optional(ATTR_TARGET_TEMPERATURE, default=80): vol.All(
cv.positive_int, vol.Range(min=MIN_TEMPERATURE, max=MAX_TEMPERATURE)
),
vol.Optional(ATTR_FAN_DURATION, default=timedelta(minutes=10)): vol.All(
cv.time_period,
vol.Range(
min=timedelta(minutes=1),
max=timedelta(minutes=MAX_FAN_DURATION),
),
vol.Optional(ATTR_FAN_DURATION, default=10): vol.All(
cv.positive_int, vol.Range(min=1, max=MAX_FAN_DURATION)
),
},
func="async_start_session",

Some files were not shown because too many files have changed in this diff Show More