Compare commits

..

2 Commits

Author SHA1 Message Date
Robert Resch
927a03eca2 Merge branch 'dev' into drop-ignore-missing-annotations 2026-03-16 19:19:47 +01:00
epenet
70ec51bcbf Drop ignore-missing-annotations from pylint 2026-02-23 16:51:00 +01:00
1271 changed files with 32997 additions and 57840 deletions

1
.gitattributes vendored
View File

@@ -16,7 +16,6 @@ Dockerfile.dev linguist-language=Dockerfile
CODEOWNERS linguist-generated=true
Dockerfile linguist-generated=true
homeassistant/generated/*.py linguist-generated=true
machine/* linguist-generated=true
mypy.ini linguist-generated=true
requirements.txt linguist-generated=true
requirements_all.txt linguist-generated=true

View File

@@ -35,7 +35,6 @@ jobs:
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
architectures: ${{ env.ARCHITECTURES }}
base_image_version: ${{ env.BASE_IMAGE_VERSION }}
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -57,10 +56,10 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}
# - name: Verify version
# uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
# with:
# ignore-dev: true
- name: Verify version
uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
with:
ignore-dev: true
- name: Fail if translations files are checked in
run: |
@@ -101,7 +100,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- arch: amd64
os: ubuntu-24.04
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
@@ -196,20 +195,77 @@ jobs:
run: |
echo "${GITHUB_SHA};${GITHUB_REF};${GITHUB_EVENT_NAME};${GITHUB_ACTOR}" > rootfs/OFFICIAL_IMAGE
- name: Build base image
uses: home-assistant/builder/actions/build-image@62a1597b84b3461abad9816d9cd92862a2b542c3 # 2026.03.2
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
arch: ${{ matrix.arch }}
build-args: |
BUILD_FROM=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ needs.init.outputs.base_image_version }}
cache-gha: false
container-registry-password: ${{ secrets.GITHUB_TOKEN }}
cosign-base-identity: "https://github.com/home-assistant/docker/.*"
cosign-base-verify: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ needs.init.outputs.base_image_version }}
image: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant
image-tags: ${{ needs.init.outputs.version }}
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Build variables
id: vars
shell: bash
env:
ARCH: ${{ matrix.arch }}
run: |
echo "base_image=ghcr.io/home-assistant/${ARCH}-homeassistant-base:${BASE_IMAGE_VERSION}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${ARCH}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"
- name: Verify base image signature
env:
BASE_IMAGE: ${{ steps.vars.outputs.base_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
"${BASE_IMAGE}"
- name: Verify cache image signature
id: cache
continue-on-error: true
env:
CACHE_IMAGE: ${{ steps.vars.outputs.cache_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
"${CACHE_IMAGE}"
- name: Build base image
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
context: .
file: ./Dockerfile
platforms: ${{ steps.vars.outputs.platform }}
push: true
version: ${{ needs.init.outputs.version }}
cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
build-args: |
BUILD_FROM=${{ steps.vars.outputs.base_image }}
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
outputs: type=image,push=true,compression=zstd,compression-level=9,force-compression=true,oci-mediatypes=true
labels: |
io.hass.arch=${{ matrix.arch }}
io.hass.version=${{ needs.init.outputs.version }}
org.opencontainers.image.created=${{ steps.vars.outputs.created }}
org.opencontainers.image.version=${{ needs.init.outputs.version }}
- name: Sign image
env:
ARCH: ${{ matrix.arch }}
VERSION: ${{ needs.init.outputs.version }}
DIGEST: ${{ steps.build.outputs.digest }}
run: |
cosign sign --yes "ghcr.io/home-assistant/${ARCH}-homeassistant:${VERSION}@${DIGEST}"
build_machine:
name: Build ${{ matrix.machine }} machine core image
@@ -258,310 +314,308 @@ jobs:
with:
persist-credentials: false
- name: Compute extra tags
id: tags
shell: bash
- name: Set build additional args
env:
VERSION: ${{ needs.init.outputs.version }}
run: |
# Create general tags
if [[ "${VERSION}" =~ d ]]; then
echo "extra_tags=dev" >> "$GITHUB_OUTPUT"
echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
elif [[ "${VERSION}" =~ b ]]; then
echo "extra_tags=beta" >> "$GITHUB_OUTPUT"
echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
else
echo "extra_tags=stable" >> "$GITHUB_OUTPUT"
echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV
fi
- name: Build machine image
uses: home-assistant/builder/actions/build-image@62a1597b84b3461abad9816d9cd92862a2b542c3 # 2026.03.2
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
arch: ${{ matrix.arch }}
build-args: |
BUILD_FROM=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
cache-gha: false
container-registry-password: ${{ secrets.GITHUB_TOKEN }}
context: machine/
cosign-base-identity: "https://github.com/home-assistant/core/.*"
cosign-base-verify: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
file: machine/${{ matrix.machine }}
image: ghcr.io/home-assistant/${{ matrix.machine }}-homeassistant
image-tags: |
${{ needs.init.outputs.version }}
${{ steps.tags.outputs.extra_tags }}
push: true
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@6cb4fd3d1338b6e22d0958a4bcb53e0965ea63b4 # 2026.02.1
with:
image: ${{ matrix.arch }}
args: |
$BUILD_ARGS \
--target /data/machine \
--cosign \
--machine "${{ needs.init.outputs.version }}=${{ matrix.machine }}"
publish_ha:
name: Publish version files
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_machine"]
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
with:
name: ${{ secrets.GIT_NAME }}
email: ${{ secrets.GIT_EMAIL }}
token: ${{ secrets.GIT_TOKEN }}
- name: Update version file
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
# publish_ha:
# name: Publish version files
# environment: ${{ needs.init.outputs.channel }}
# if: github.repository_owner == 'home-assistant'
# needs: ["init", "build_machine"]
# runs-on: ubuntu-latest
# permissions:
# contents: read
# steps:
# - name: Checkout the repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Initialize git
# uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
# with:
# name: ${{ secrets.GIT_NAME }}
# email: ${{ secrets.GIT_EMAIL }}
# token: ${{ secrets.GIT_TOKEN }}
#
# - name: Update version file
# uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
# with:
# key: "homeassistant[]"
# key-description: "Home Assistant Core"
# version: ${{ needs.init.outputs.version }}
# channel: ${{ needs.init.outputs.channel }}
# exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
#
# - name: Update version file (stable -> beta)
# if: needs.init.outputs.channel == 'stable'
# uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
# with:
# key: "homeassistant[]"
# key-description: "Home Assistant Core"
# version: ${{ needs.init.outputs.version }}
# channel: beta
# exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
#
# publish_container:
# name: Publish meta container for ${{ matrix.registry }}
# environment: ${{ needs.init.outputs.channel }}
# if: github.repository_owner == 'home-assistant'
# needs: ["init", "build_base"]
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# packages: write # To push to GHCR
# id-token: write # For cosign signing
# strategy:
# fail-fast: false
# matrix:
# registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
# steps:
# - name: Install Cosign
# uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
# with:
# cosign-release: "v2.5.3"
#
# - name: Login to DockerHub
# if: matrix.registry == 'docker.io/homeassistant'
# uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
#
# - name: Login to GitHub Container Registry
# uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
#
# - name: Verify architecture image signatures
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# VERSION: ${{ needs.init.outputs.version }}
# run: |
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# for arch in $ARCHS; do
# echo "Verifying ${arch} image signature..."
# cosign verify \
# --certificate-oidc-issuer https://token.actions.githubusercontent.com \
# --certificate-identity-regexp https://github.com/home-assistant/core/.* \
# "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
# done
# echo "✓ All images verified successfully"
#
# # Generate all Docker tags based on version string
# # Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# # Examples:
# # 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# # 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# # 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
# - name: Generate Docker metadata
# id: meta
# uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
# with:
# images: ${{ matrix.registry }}/home-assistant
# sep-tags: ","
# tags: |
# type=raw,value=${{ needs.init.outputs.version }},priority=9999
# type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
# type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
# type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
# type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
# type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
# type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
#
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v3.7.1
#
# - name: Copy architecture images to DockerHub
# if: matrix.registry == 'docker.io/homeassistant'
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# VERSION: ${{ needs.init.outputs.version }}
# run: |
# # Use imagetools to copy image blobs directly between registries
# # This preserves provenance/attestations and seems to be much faster than pull/push
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# for arch in $ARCHS; do
# echo "Copying ${arch} image to DockerHub..."
# for attempt in 1 2 3; do
# if docker buildx imagetools create \
# --tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
# "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
# break
# fi
# echo "Attempt ${attempt} failed, retrying in 10 seconds..."
# sleep 10
# if [ "${attempt}" -eq 3 ]; then
# echo "Failed after 3 attempts"
# exit 1
# fi
# done
# cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
# done
#
# - name: Create and push multi-arch manifests
# shell: bash
# env:
# ARCHITECTURES: ${{ needs.init.outputs.architectures }}
# REGISTRY: ${{ matrix.registry }}
# VERSION: ${{ needs.init.outputs.version }}
# META_TAGS: ${{ steps.meta.outputs.tags }}
# run: |
# # Build list of architecture images dynamically
# ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
# ARCH_IMAGES=()
# for arch in $ARCHS; do
# ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
# done
#
# # Build list of all tags for single manifest creation
# # Note: Using sep-tags=',' in metadata-action for easier parsing
# TAG_ARGS=()
# IFS=',' read -ra TAGS <<< "${META_TAGS}"
# for tag in "${TAGS[@]}"; do
# TAG_ARGS+=("--tag" "${tag}")
# done
#
# # Create manifest with ALL tags in a single operation (much faster!)
# echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
# docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
#
# # Sign each tag separately (signing requires individual tag names)
# echo "Signing all tags..."
# for tag in "${TAGS[@]}"; do
# echo "Signing ${tag}"
# cosign sign --yes "${tag}"
# done
#
# echo "All manifests created and signed successfully"
#
# build_python:
# name: Build PyPi package
# environment: ${{ needs.init.outputs.channel }}
# needs: ["init", "build_base"]
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# id-token: write # For PyPI trusted publishing
# if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
# steps:
# - name: Checkout the repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Set up Python
# uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
# with:
# python-version-file: ".python-version"
#
# - name: Download translations
# uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
# with:
# name: translations
#
# - name: Extract translations
# run: |
# tar xvf translations.tar.gz
# rm translations.tar.gz
#
# - name: Build package
# shell: bash
# run: |
# # Remove dist, build, and homeassistant.egg-info
# # when build locally for testing!
# pip install build
# python -m build
#
# - name: Upload package to PyPI
# uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
# with:
# skip-existing: true
#
# hassfest-image:
# name: Build and test hassfest image
# runs-on: ubuntu-latest
# permissions:
# contents: read # To check out the repository
# packages: write # To push to GHCR
# attestations: write # For build provenance attestation
# id-token: write # For build provenance attestation
# needs: ["init"]
# if: github.repository_owner == 'home-assistant'
# env:
# HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
# HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
# steps:
# - name: Checkout repository
# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# with:
# persist-credentials: false
#
# - name: Login to GitHub Container Registry
# uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
# with:
# registry: ghcr.io
# username: ${{ github.repository_owner }}
# password: ${{ secrets.GITHUB_TOKEN }}
#
# - name: Build Docker image
# uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
# with:
# context: . # So action will not pull the repository again
# file: ./script/hassfest/docker/Dockerfile
# load: true
# tags: ${{ env.HASSFEST_IMAGE_TAG }}
#
# - name: Run hassfest against core
# run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
#
# - name: Push Docker image
# if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
# id: push
# uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
# with:
# context: . # So action will not pull the repository again
# file: ./script/hassfest/docker/Dockerfile
# push: true
# tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
#
# - name: Generate artifact attestation
# if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
# uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4.1.0
# with:
# subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
# subject-digest: ${{ steps.push.outputs.digest }}
# push-to-registry: true
channel: ${{ needs.init.outputs.channel }}
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
- name: Update version file (stable -> beta)
if: needs.init.outputs.channel == 'stable'
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
version: ${{ needs.init.outputs.version }}
channel: beta
exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
publish_container:
name: Publish meta container for ${{ matrix.registry }}
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Verify architecture image signatures
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
done
echo "✓ All images verified successfully"
# Generate all Docker tags based on version string
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# Examples:
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
tags: |
type=raw,value=${{ needs.init.outputs.version }},priority=9999
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v3.7.1
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
sleep 10
if [ "${attempt}" -eq 3 ]; then
echo "Failed after 3 attempts"
exit 1
fi
done
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
done
- name: Create and push multi-arch manifests
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
REGISTRY: ${{ matrix.registry }}
VERSION: ${{ needs.init.outputs.version }}
META_TAGS: ${{ steps.meta.outputs.tags }}
run: |
# Build list of architecture images dynamically
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
done
# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${META_TAGS}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
# Create manifest with ALL tags in a single operation (much faster!)
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
# Sign each tag separately (signing requires individual tag names)
echo "Signing all tags..."
for tag in "${TAGS[@]}"; do
echo "Signing ${tag}"
cosign sign --yes "${tag}"
done
echo "All manifests created and signed successfully"
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
id-token: write # For PyPI trusted publishing
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version-file: ".python-version"
- name: Download translations
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: translations
- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install build
python -m build
- name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
skip-existing: true
hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
packages: write # To push to GHCR
attestations: write # For build provenance attestation
id-token: write # For build provenance attestation
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@@ -709,7 +709,7 @@ jobs:
run: |
. venv/bin/activate
python --version
pylint --ignore-missing-annotations=y homeassistant
pylint homeassistant
- name: Run pylint (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
@@ -718,7 +718,7 @@ jobs:
run: |
. venv/bin/activate
python --version
pylint --ignore-missing-annotations=y $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
pylint $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
pylint-tests:
name: Check pylint on tests
@@ -852,6 +852,10 @@ jobs:
needs:
- info
- base
- gen-requirements-all
- hassfest
- prek
- mypy
steps:
- name: Restore apt cache
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3

2
CODEOWNERS generated
View File

@@ -974,8 +974,6 @@ build.json @home-assistant/supervisor
/tests/components/logbook/ @home-assistant/core
/homeassistant/components/logger/ @home-assistant/core
/tests/components/logger/ @home-assistant/core
/homeassistant/components/lojack/ @devinslick
/tests/components/lojack/ @devinslick
/homeassistant/components/london_underground/ @jpbede
/tests/components/london_underground/ @jpbede
/homeassistant/components/lookin/ @ANMalko @bdraco

1
Dockerfile generated
View File

@@ -10,6 +10,7 @@ LABEL \
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.source="https://github.com/home-assistant/core" \
org.opencontainers.image.title="Home Assistant" \
org.opencontainers.image.url="https://www.home-assistant.io/"

View File

@@ -338,7 +338,6 @@ class Analytics:
hass = self._hass
supervisor_info = None
addons_info: dict[str, Any] | None = None
operating_system_info: dict[str, Any] = {}
if self._data.uuid is None:
@@ -348,7 +347,6 @@ class Analytics:
if self.supervisor:
supervisor_info = hassio.get_supervisor_info(hass)
operating_system_info = hassio.get_os_info(hass) or {}
addons_info = hassio.get_addons_info(hass) or {}
system_info = await async_get_system_info(hass)
integrations = []
@@ -421,10 +419,13 @@ class Analytics:
integrations.append(integration.domain)
if addons_info is not None:
if supervisor_info is not None:
supervisor_client = hassio.get_supervisor_client(hass)
installed_addons = await asyncio.gather(
*(supervisor_client.addons.addon_info(slug) for slug in addons_info)
*(
supervisor_client.addons.addon_info(addon[ATTR_SLUG])
for addon in supervisor_info[ATTR_ADDONS]
)
)
addons.extend(
{

View File

@@ -121,7 +121,6 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
"alarm_control_panel",
"assist_satellite",
"climate",
"cover",
"device_tracker",
"fan",
"humidifier",

View File

@@ -153,8 +153,8 @@ def websocket_get_entities(
{
vol.Required("type"): "config/entity_registry/update",
vol.Required("entity_id"): cv.entity_id,
vol.Optional("aliases"): [vol.Any(str, None)],
# If passed in, we update value. Passing None will remove old value.
vol.Optional("aliases"): list,
vol.Optional("area_id"): vol.Any(str, None),
# Categories is a mapping of key/value (scope/category_id) pairs.
# If passed in, we update/adjust only the provided scope(s).
@@ -225,15 +225,10 @@ def websocket_update_entity(
changes[key] = msg[key]
if "aliases" in msg:
# Sanitize aliases by removing:
# - Trailing and leading whitespace characters in the individual aliases
# Create a set for the aliases without:
# - Empty strings
changes["aliases"] = aliases = []
for alias in msg["aliases"]:
if alias is None:
aliases.append(er.COMPUTED_NAME)
elif alias := alias.strip():
aliases.append(alias)
# - Trailing and leading whitespace characters in the individual aliases
changes["aliases"] = {s_strip for s in msg["aliases"] if (s_strip := s.strip())}
if "labels" in msg:
# Convert labels to a set

View File

@@ -992,11 +992,18 @@ class DefaultAgent(ConversationEntity):
continue
context[attr] = state.attributes[attr]
entity_entry = entity_registry.async_get(state.entity_id)
for name in intent.async_get_entity_aliases(
self.hass, entity_entry, state=state
):
yield (name, name, context)
if (
entity := entity_registry.async_get(state.entity_id)
) and entity.aliases:
for alias in entity.aliases:
alias = alias.strip()
if not alias:
continue
yield (alias, alias, context)
# Default name
yield (state.name, state.name, context)
def _recognize_strict(
self,

View File

@@ -1,103 +0,0 @@
"""Provides conditions for covers."""
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.condition import Condition, EntityConditionBase
from .const import ATTR_IS_CLOSED, DOMAIN, CoverDeviceClass
from .models import CoverDomainSpec
class CoverConditionBase(EntityConditionBase[CoverDomainSpec]):
"""Base condition for cover state checks."""
def is_valid_state(self, entity_state: State) -> bool:
"""Check if the state matches the expected cover state."""
domain_spec = self._domain_specs[split_entity_id(entity_state.entity_id)[0]]
if domain_spec.value_source is not None:
return (
entity_state.attributes.get(domain_spec.value_source)
== domain_spec.target_value
)
return entity_state.state == domain_spec.target_value
def make_cover_is_open_condition(
*, device_classes: dict[str, str]
) -> type[CoverConditionBase]:
"""Create a condition for cover is open."""
class CoverIsOpenCondition(CoverConditionBase):
"""Condition for cover is open."""
_domain_specs = {
domain: CoverDomainSpec(
device_class=dc,
value_source=ATTR_IS_CLOSED if domain == DOMAIN else None,
target_value=False if domain == DOMAIN else STATE_ON,
)
for domain, dc in device_classes.items()
}
return CoverIsOpenCondition
def make_cover_is_closed_condition(
    *, device_classes: dict[str, str]
) -> type[CoverConditionBase]:
    """Create a condition class that checks covers for the closed state."""
    specs: dict[str, CoverDomainSpec] = {}
    for cover_domain, device_class in device_classes.items():
        # The native cover domain exposes an is_closed attribute (closed when
        # True); any other domain is matched on an "off" state instead.
        is_native = cover_domain == DOMAIN
        specs[cover_domain] = CoverDomainSpec(
            device_class=device_class,
            value_source=ATTR_IS_CLOSED if is_native else None,
            target_value=True if is_native else STATE_OFF,
        )

    class CoverIsClosedCondition(CoverConditionBase):
        """Condition for cover is closed."""

        _domain_specs = specs

    return CoverIsClosedCondition
DEVICE_CLASSES_AWNING: dict[str, str] = {DOMAIN: CoverDeviceClass.AWNING}
DEVICE_CLASSES_BLIND: dict[str, str] = {DOMAIN: CoverDeviceClass.BLIND}
DEVICE_CLASSES_CURTAIN: dict[str, str] = {DOMAIN: CoverDeviceClass.CURTAIN}
DEVICE_CLASSES_SHADE: dict[str, str] = {DOMAIN: CoverDeviceClass.SHADE}
DEVICE_CLASSES_SHUTTER: dict[str, str] = {DOMAIN: CoverDeviceClass.SHUTTER}

# Build the "<kind>_is_closed" / "<kind>_is_open" condition pair for every
# supported cover device class.
CONDITIONS: dict[str, type[Condition]] = {}
for _kind, _device_classes in (
    ("awning", DEVICE_CLASSES_AWNING),
    ("blind", DEVICE_CLASSES_BLIND),
    ("curtain", DEVICE_CLASSES_CURTAIN),
    ("shade", DEVICE_CLASSES_SHADE),
    ("shutter", DEVICE_CLASSES_SHUTTER),
):
    CONDITIONS[f"{_kind}_is_closed"] = make_cover_is_closed_condition(
        device_classes=_device_classes
    )
    CONDITIONS[f"{_kind}_is_open"] = make_cover_is_open_condition(
        device_classes=_device_classes
    )


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
    """Return the conditions for covers."""
    return CONDITIONS

View File

@@ -1,80 +0,0 @@
# Shared field definition reused by every cover condition below.
.condition_common_fields: &condition_common_fields
  behavior:
    required: true
    default: any
    selector:
      select:
        translation_key: condition_behavior
        options:
          - all
          - any

awning_is_closed:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: awning
awning_is_open:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: awning
blind_is_closed:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: blind
blind_is_open:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: blind
curtain_is_closed:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: curtain
curtain_is_open:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: curtain
shade_is_closed:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: shade
shade_is_open:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: shade
shutter_is_closed:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: shutter
shutter_is_open:
  fields: *condition_common_fields
  target:
    entity:
      - domain: cover
        device_class: shutter

View File

@@ -1,36 +1,4 @@
{
"conditions": {
"awning_is_closed": {
"condition": "mdi:storefront-outline"
},
"awning_is_open": {
"condition": "mdi:storefront-outline"
},
"blind_is_closed": {
"condition": "mdi:blinds-horizontal-closed"
},
"blind_is_open": {
"condition": "mdi:blinds-horizontal"
},
"curtain_is_closed": {
"condition": "mdi:curtains-closed"
},
"curtain_is_open": {
"condition": "mdi:curtains"
},
"shade_is_closed": {
"condition": "mdi:roller-shade-closed"
},
"shade_is_open": {
"condition": "mdi:roller-shade"
},
"shutter_is_closed": {
"condition": "mdi:window-shutter"
},
"shutter_is_open": {
"condition": "mdi:window-shutter-open"
}
},
"entity_component": {
"_": {
"default": "mdi:window-open",

View File

@@ -1,12 +0,0 @@
"""Data models for the cover integration."""

from dataclasses import dataclass

from homeassistant.helpers.automation import DomainSpec


@dataclass(frozen=True, slots=True)
class CoverDomainSpec(DomainSpec):
    """DomainSpec with a target value for comparison."""

    # Value a condition/trigger compares against: a bool when checking the
    # cover's is_closed attribute, a state string otherwise, or None.
    target_value: str | bool | None = None

View File

@@ -1,112 +1,8 @@
{
"common": {
"condition_behavior_description": "How the state should match on the targeted covers.",
"condition_behavior_name": "Behavior",
"trigger_behavior_description": "The behavior of the targeted covers to trigger on.",
"trigger_behavior_name": "Behavior"
},
"conditions": {
"awning_is_closed": {
"description": "Tests if one or more awnings are closed.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Awning is closed"
},
"awning_is_open": {
"description": "Tests if one or more awnings are open.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Awning is open"
},
"blind_is_closed": {
"description": "Tests if one or more blinds are closed.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Blind is closed"
},
"blind_is_open": {
"description": "Tests if one or more blinds are open.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Blind is open"
},
"curtain_is_closed": {
"description": "Tests if one or more curtains are closed.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Curtain is closed"
},
"curtain_is_open": {
"description": "Tests if one or more curtains are open.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Curtain is open"
},
"shade_is_closed": {
"description": "Tests if one or more shades are closed.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Shade is closed"
},
"shade_is_open": {
"description": "Tests if one or more shades are open.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Shade is open"
},
"shutter_is_closed": {
"description": "Tests if one or more shutters are closed.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Shutter is closed"
},
"shutter_is_open": {
"description": "Tests if one or more shutters are open.",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::condition_behavior_description%]",
"name": "[%key:component::cover::common::condition_behavior_name%]"
}
},
"name": "Shutter is open"
}
},
"device_automation": {
"action_type": {
"close": "Close {entity_name}",
@@ -191,12 +87,6 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -1,11 +1,20 @@
"""Provides triggers for covers."""
from dataclasses import dataclass
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
from .const import ATTR_IS_CLOSED, DOMAIN, CoverDeviceClass
from .models import CoverDomainSpec
@dataclass(frozen=True, slots=True)
class CoverDomainSpec(DomainSpec):
"""DomainSpec with a target value for comparison."""
target_value: str | bool | None = None
class CoverTriggerBase(EntityTriggerBase[CoverDomainSpec]):

View File

@@ -48,7 +48,7 @@ def async_redact_data[_T](data: _T, to_redact: Iterable[Any]) -> _T:
def _entity_entry_filter(a: attr.Attribute, _: Any) -> bool:
return a.name not in ("_cache", "compat_aliases", "compat_name")
return a.name != "_cache"
@callback

View File

@@ -160,23 +160,6 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
_native_supported_color_modes: tuple[ESPHomeColorMode, ...]
_supports_color_mode = False
def _color_temp_to_cold_warm(self, color_temp_mired: float) -> tuple[float, float]:
"""Convert a color temperature in mireds to cold/warm white fractions.
Returns (cold_white, warm_white) normalized so the brighter channel
is 1.0.
"""
static_info = self._static_info
min_mireds = static_info.min_mireds
max_mireds = static_info.max_mireds
if max_mireds <= min_mireds:
return 1.0, 1.0
color_temp_clamped = min(max(color_temp_mired, min_mireds), max_mireds)
ww_frac = (color_temp_clamped - min_mireds) / (max_mireds - min_mireds)
cw_frac = 1 - ww_frac
max_frac = max(cw_frac, ww_frac)
return cw_frac / max_frac, ww_frac / max_frac
@property
@esphome_state_property
def is_on(self) -> bool:
@@ -258,19 +241,12 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
# Do not use kelvin_to_mired here to prevent precision loss
color_temp_mired = 1_000_000.0 / color_temp_k
data["color_temperature"] = 1_000_000.0 / color_temp_k
if color_temp_modes := _filter_color_modes(
color_modes, LightColorCapability.COLOR_TEMPERATURE
):
data["color_temperature"] = color_temp_mired
color_modes = color_temp_modes
else:
# Convert color temperature to explicit cold/warm white
# values to avoid ESPHome applying brightness to both
# master brightness and white channels (b² effect).
data["cold_white"], data["warm_white"] = self._color_temp_to_cold_warm(
color_temp_mired
)
color_modes = _filter_color_modes(
color_modes, LightColorCapability.COLD_WARM_WHITE
)
@@ -369,13 +345,19 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
self._native_supported_color_modes, LightColorCapability.COLD_WARM_WHITE
):
# Try to reverse white + color temp to cwww
static_info = self._static_info
min_ct = static_info.min_mireds
max_ct = static_info.max_mireds
color_temp = min(max(state.color_temperature, min_ct), max_ct)
white = state.white
cw, ww = self._color_temp_to_cold_warm(state.color_temperature)
ww_frac = (color_temp - min_ct) / (max_ct - min_ct)
cw_frac = 1 - ww_frac
return (
*rgb,
round(white * cw * 255),
round(white * ww * 255),
round(white * cw_frac / max(cw_frac, ww_frac) * 255),
round(white * ww_frac / max(cw_frac, ww_frac) * 255),
)
return (
*rgb,

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from aiohttp import ClientError
@@ -57,42 +56,3 @@ class FreshrFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
    self, _user_input: Mapping[str, Any]
) -> ConfigFlowResult:
    """Handle reauthentication."""
    # The entry data passed in is unused; go straight to confirmation.
    return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Handle reauthentication confirmation."""
    errors: dict[str, str] = {}
    reauth_entry = self._get_reauth_entry()
    if user_input is not None:
        client = FreshrClient(session=async_get_clientsession(self.hass))
        try:
            # Validate the new password against the stored username.
            await client.login(
                reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD]
            )
        except LoginError:
            errors["base"] = "invalid_auth"
        except ClientError:
            errors["base"] = "cannot_connect"
        except Exception:  # noqa: BLE001
            LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            # Only the password is updated; the entry is reloaded afterwards.
            return self.async_update_reload_and_abort(
                reauth_entry,
                data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
            )
    # First visit or validation failed: (re)show the password form.
    return self.async_show_form(
        step_id="reauth_confirm",
        data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
        description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]},
        errors=errors,
    )

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/freshr",
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["pyfreshr==1.2.0"]
}

View File

@@ -36,7 +36,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: done
# Gold

View File

@@ -2,7 +2,9 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "Cannot change the account username."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -10,15 +12,6 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::freshr::config::step::user::data_description::password%]"
},
"description": "Re-enter the password for your Fresh-r account `{username}`."
},
"user": {
"data": {
"password": "[%key:common::config_flow::data::password%]",

View File

@@ -4,9 +4,9 @@ set_guest_wifi_password:
required: true
selector:
device:
integration: fritz
entity:
integration: fritz
domain: update
device_class: connectivity
password:
required: false
selector:
@@ -23,9 +23,9 @@ dial:
required: true
selector:
device:
integration: fritz
entity:
integration: fritz
domain: update
device_class: connectivity
number:
required: true
selector:

View File

@@ -7,7 +7,7 @@ import logging
from bleak.backends.device import BLEDevice
from gardena_bluetooth.client import CachedConnection, Client
from gardena_bluetooth.const import AquaContour, DeviceConfiguration, DeviceInformation
from gardena_bluetooth.const import DeviceConfiguration, DeviceInformation
from gardena_bluetooth.exceptions import (
CharacteristicNoAccess,
CharacteristicNotFound,
@@ -35,7 +35,6 @@ PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.VALVE,
@@ -91,10 +90,8 @@ async def async_setup_entry(
name = entry.title
name = await client.read_char(DeviceConfiguration.custom_device_name, name)
name = await client.read_char(AquaContour.custom_device_name, name)
await _update_timestamp(client, DeviceConfiguration.unix_timestamp)
await _update_timestamp(client, AquaContour.unix_timestamp)
except (TimeoutError, CommunicationFailure, DeviceUnavailable) as exception:
await client.disconnect()

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass, field
from gardena_bluetooth.const import AquaContour, Sensor, Valve
from gardena_bluetooth.const import Sensor, Valve
from gardena_bluetooth.parse import CharacteristicBool
from homeassistant.components.binary_sensor import (
@@ -47,13 +47,6 @@ DESCRIPTIONS = (
entity_category=EntityCategory.DIAGNOSTIC,
char=Sensor.connected_state,
),
GardenaBluetoothBinarySensorEntityDescription(
key=AquaContour.frost_warning.unique_id,
translation_key="frost_warning",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
char=AquaContour.frost_warning,
),
)

View File

@@ -43,7 +43,6 @@ def _is_supported(discovery_info: BluetoothServiceInfo):
ProductType.WATER_COMPUTER,
ProductType.AUTOMATS,
ProductType.PRESSURE_TANKS,
ProductType.AQUA_CONTOURS,
):
_LOGGER.debug("Unsupported device: %s", manufacturer_data)
return False
@@ -71,7 +70,6 @@ class GardenaBluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_read_data(self):
"""Try to connect to device and extract information."""
assert self.address
client = Client(get_connection(self.hass, self.address))
try:
model = await client.read_char(DeviceInformation.model_number)

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass, field
from gardena_bluetooth.const import DeviceConfiguration, Sensor, Spray, Valve
from gardena_bluetooth.const import DeviceConfiguration, Sensor, Valve
from gardena_bluetooth.parse import (
Characteristic,
CharacteristicInt,
@@ -18,7 +18,7 @@ from homeassistant.components.number import (
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import DEGREE, PERCENTAGE, EntityCategory, UnitOfTime
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -34,7 +34,6 @@ class GardenaBluetoothNumberEntityDescription(NumberEntityDescription):
default_factory=lambda: CharacteristicInt("")
)
connected_state: Characteristic | None = None
scale: float = 1.0
@property
def context(self) -> set[str]:
@@ -105,27 +104,6 @@ DESCRIPTIONS = (
char=Sensor.threshold,
connected_state=Sensor.connected_state,
),
GardenaBluetoothNumberEntityDescription(
key="spray_sector",
translation_key="spray_sector",
native_unit_of_measurement=DEGREE,
mode=NumberMode.BOX,
native_min_value=0.0,
native_max_value=359.0,
native_step=1.0,
char=Spray.sector,
),
GardenaBluetoothNumberEntityDescription(
key="spray_distance",
translation_key="spray_distance",
native_unit_of_measurement=PERCENTAGE,
mode=NumberMode.SLIDER,
native_min_value=0.0,
native_max_value=100.0,
native_step=0.1,
char=Spray.distance,
scale=10.0,
),
)
@@ -156,7 +134,7 @@ class GardenaBluetoothNumber(GardenaBluetoothDescriptorEntity, NumberEntity):
if data is None:
self._attr_native_value = None
else:
self._attr_native_value = float(data) / self.entity_description.scale
self._attr_native_value = float(data)
if char := self.entity_description.connected_state:
self._attr_available = bool(self.coordinator.get_cached(char))
@@ -167,9 +145,7 @@ class GardenaBluetoothNumber(GardenaBluetoothDescriptorEntity, NumberEntity):
async def async_set_native_value(self, value: float) -> None:
"""Set new value."""
await self.coordinator.write(
self.entity_description.char, int(value * self.entity_description.scale)
)
await self.coordinator.write(self.entity_description.char, int(value))
self.async_write_ha_state()

View File

@@ -1,113 +0,0 @@
"""Support for select entities."""
from __future__ import annotations
from dataclasses import dataclass, field
from enum import IntEnum
from gardena_bluetooth.const import (
AquaContour,
AquaContourPosition,
AquaContourWatering,
)
from gardena_bluetooth.parse import CharacteristicInt
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import GardenaBluetoothConfigEntry
from .entity import GardenaBluetoothDescriptorEntity
def _enum_to_int(enum: type[IntEnum]) -> dict[str, int]:
return {member.name.lower(): member.value for member in enum}
def _reverse_dict(value: dict[str, int]) -> dict[int, str]:
return {value: key for key, value in value.items()}
@dataclass(frozen=True, kw_only=True)
class GardenaBluetoothSelectEntityDescription(SelectEntityDescription):
    """Description of a Gardena Bluetooth select entity.

    ``key``, ``options`` and ``number_to_option`` are derived from ``char``
    and ``option_to_number`` after construction.
    """

    key: str = field(init=False)
    char: CharacteristicInt
    option_to_number: dict[str, int]
    number_to_option: dict[int, str] = field(init=False)

    def __post_init__(self):
        """Initialize calculated fields."""
        # The dataclass is frozen, so derived fields are set via
        # object.__setattr__ to bypass immutability.
        object.__setattr__(
            self, "number_to_option", _reverse_dict(self.option_to_number)
        )
        object.__setattr__(self, "options", list(self.option_to_number))
        object.__setattr__(self, "key", self.char.unique_id)

    @property
    def context(self) -> set[str]:
        """Context needed for update coordinator."""
        return {self.char.uuid}
DESCRIPTIONS = (
    # Which contour (if any) is currently being watered.
    GardenaBluetoothSelectEntityDescription(
        translation_key="watering_active",
        char=AquaContourWatering.watering_active,
        option_to_number=_enum_to_int(AquaContourWatering.watering_active.enum),
    ),
    # Overall device operation mode.
    GardenaBluetoothSelectEntityDescription(
        translation_key="operation_mode",
        char=AquaContour.operation_mode,
        option_to_number=_enum_to_int(AquaContour.operation_mode.enum),
    ),
    # Positions are numbered 1..5 on the device; no enum is exposed for them.
    GardenaBluetoothSelectEntityDescription(
        translation_key="active_position",
        char=AquaContourPosition.active_position,
        option_to_number={f"position_{index}": index for index in range(1, 6)},
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: GardenaBluetoothConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up select based on a config entry."""
    coordinator = entry.runtime_data
    # Only create entities for characteristics the device actually exposes.
    async_add_entities(
        GardenaBluetoothSelectEntity(coordinator, description, description.context)
        for description in DESCRIPTIONS
        if description.char.unique_id in coordinator.characteristics
    )
class GardenaBluetoothSelectEntity(GardenaBluetoothDescriptorEntity, SelectEntity):
    """Representation of a select entity."""

    entity_description: GardenaBluetoothSelectEntityDescription

    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""
        raw = self.coordinator.get_cached(self.entity_description.char)
        if raw is None:
            return None
        # Unknown raw values yield None rather than raising.
        return self.entity_description.number_to_option.get(raw)

    async def async_select_option(self, option: str) -> None:
        """Change the selected option."""
        description = self.entity_description
        await self.coordinator.write(
            description.char, description.option_to_number[option]
        )
        self.async_write_ha_state()

View File

@@ -2,19 +2,10 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass, field
from datetime import UTC, datetime, timedelta
from gardena_bluetooth.const import (
AquaContourBattery,
Battery,
EventHistory,
FlowStatistics,
Sensor,
Spray,
Valve,
)
from gardena_bluetooth.const import Battery, Sensor, Valve
from gardena_bluetooth.parse import Characteristic
from homeassistant.components.sensor import (
@@ -22,15 +13,8 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
StateType,
)
from homeassistant.const import (
DEGREE,
PERCENTAGE,
EntityCategory,
UnitOfVolume,
UnitOfVolumeFlowRate,
)
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
@@ -38,28 +22,13 @@ from homeassistant.util import dt as dt_util
from .coordinator import GardenaBluetoothConfigEntry, GardenaBluetoothCoordinator
from .entity import GardenaBluetoothDescriptorEntity, GardenaBluetoothEntity
type SensorRawType = StateType | datetime
def _get_timestamp(value: datetime | None):
if value is None:
return None
return value.replace(tzinfo=dt_util.get_default_time_zone())
def _get_distance_ratio(value: int | None):
if value is None:
return None
return value / 1000
@dataclass(frozen=True)
class GardenaBluetoothSensorEntityDescription[T](SensorEntityDescription):
class GardenaBluetoothSensorEntityDescription(SensorEntityDescription):
"""Description of entity."""
char: Characteristic[T] = field(default_factory=lambda: Characteristic(""))
char: Characteristic = field(default_factory=lambda: Characteristic(""))
connected_state: Characteristic | None = None
get: Callable[[T | None], SensorRawType] = lambda x: x # type: ignore[assignment, return-value]
@property
def context(self) -> set[str]:
@@ -87,14 +56,6 @@ DESCRIPTIONS = (
native_unit_of_measurement=PERCENTAGE,
char=Battery.battery_level,
),
GardenaBluetoothSensorEntityDescription(
key=AquaContourBattery.battery_level.unique_id,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=PERCENTAGE,
char=AquaContourBattery.battery_level,
),
GardenaBluetoothSensorEntityDescription(
key=Sensor.battery_level.unique_id,
translation_key="sensor_battery_level",
@@ -127,78 +88,6 @@ DESCRIPTIONS = (
entity_category=EntityCategory.DIAGNOSTIC,
char=Sensor.measurement_timestamp,
connected_state=Sensor.connected_state,
get=_get_timestamp,
),
GardenaBluetoothSensorEntityDescription(
key=FlowStatistics.overall.unique_id,
translation_key="flow_statistics_overall",
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.VOLUME,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolume.LITERS,
char=FlowStatistics.overall,
),
GardenaBluetoothSensorEntityDescription(
key=FlowStatistics.current.unique_id,
translation_key="flow_statistics_current",
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
char=FlowStatistics.current,
),
GardenaBluetoothSensorEntityDescription(
key=FlowStatistics.resettable.unique_id,
translation_key="flow_statistics_resettable",
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.VOLUME,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolume.LITERS,
char=FlowStatistics.resettable,
),
GardenaBluetoothSensorEntityDescription(
key=FlowStatistics.last_reset.unique_id,
translation_key="flow_statistics_reset_timestamp",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
char=FlowStatistics.last_reset,
get=_get_timestamp,
),
GardenaBluetoothSensorEntityDescription(
key=Spray.current_distance.unique_id,
translation_key="spray_current_distance",
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=PERCENTAGE,
char=Spray.current_distance,
get=_get_distance_ratio,
),
GardenaBluetoothSensorEntityDescription(
key=Spray.current_sector.unique_id,
translation_key="spray_current_sector",
state_class=SensorStateClass.MEASUREMENT_ANGLE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=DEGREE,
char=Spray.current_sector,
),
GardenaBluetoothSensorEntityDescription(
key="aqua_contour_error",
translation_key="aqua_contour_error",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.ENUM,
char=EventHistory.error,
get=lambda x: (
x.error_code.name.lower()
if x and isinstance(x.error_code, EventHistory.error.enum)
else None
),
options=[member.name.lower() for member in EventHistory.error.enum],
),
GardenaBluetoothSensorEntityDescription(
key="aqua_contour_error_timestamp",
translation_key="error_timestamp",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorDeviceClass.TIMESTAMP,
char=EventHistory.error,
get=lambda x: _get_timestamp(x.time_stamp) if x else None,
),
)
@@ -227,7 +116,8 @@ class GardenaBluetoothSensor(GardenaBluetoothDescriptorEntity, SensorEntity):
def _handle_coordinator_update(self) -> None:
value = self.coordinator.get_cached(self.entity_description.char)
value = self.entity_description.get(value)
if isinstance(value, datetime):
value = value.replace(tzinfo=dt_util.get_default_time_zone())
self._attr_native_value = value
if char := self.entity_description.connected_state:

View File

@@ -22,9 +22,6 @@
},
"entity": {
"binary_sensor": {
"frost_warning": {
"name": "Frost"
},
"sensor_connected_state": {
"name": "Sensor connection"
},
@@ -55,79 +52,12 @@
},
"sensor_threshold": {
"name": "Sensor threshold"
},
"spray_distance": {
"name": "Distance"
},
"spray_sector": {
"name": "Sector"
}
},
"select": {
"active_position": {
"name": "Active position",
"state": {
"position_1": "Position 1",
"position_2": "Position 2",
"position_3": "Position 3",
"position_4": "Position 4",
"position_5": "Position 5"
}
},
"operation_mode": {
"name": "Operation mode",
"state": {
"active": "Active",
"deep_sleep": "Deep sleep",
"manual_mode": "Manual",
"pre_winter": "Winter preparation"
}
},
"watering_active": {
"name": "Watering",
"state": {
"contour_1": "Contour 1",
"contour_2": "Contour 2",
"contour_3": "Contour 3",
"contour_4": "Contour 4",
"contour_5": "Contour 5",
"preview": "Preview",
"rest": "Idle",
"setup_mode": "Setup"
}
}
},
"sensor": {
"activation_reason": {
"name": "Activation reason"
},
"aqua_contour_error": {
"name": "Error",
"state": {
"charger_error": "Charger error",
"flash_error": "Flash error",
"no_error": "No error detected",
"no_water": "Not enough water",
"rotation_sensor_error": "Rotation sensor error",
"sprinkler_motor_error": "Sprinkler motor error",
"valve_motor_error": "Valve motor error"
}
},
"error_timestamp": {
"name": "Error timestamp"
},
"flow_statistics_current": {
"name": "Current flow"
},
"flow_statistics_overall": {
"name": "Overall flow"
},
"flow_statistics_reset_timestamp": {
"name": "Flow reset timestamp"
},
"flow_statistics_resettable": {
"name": "Flow since reset"
},
"remaining_open_timestamp": {
"name": "Valve closing"
},
@@ -139,12 +69,6 @@
},
"sensor_type": {
"name": "Sensor type"
},
"spray_current_distance": {
"name": "Current distance"
},
"spray_current_sector": {
"name": "Current sector"
}
},
"switch": {

View File

@@ -29,7 +29,6 @@ from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
intent,
start,
)
from homeassistant.helpers.event import async_call_later
@@ -598,6 +597,7 @@ class GoogleEntity:
state = self.state
traits = self.traits()
entity_config = self.config.entity_config.get(state.entity_id, {})
name = (entity_config.get(CONF_NAME) or state.name).strip()
# Find entity/device/area registry entries
entity_entry, device_entry, area_entry = _get_registry_entries(
@@ -607,6 +607,7 @@ class GoogleEntity:
# Build the device info
device = {
"id": state.entity_id,
"name": {"name": name},
"attributes": {},
"traits": [trait.name for trait in traits],
"willReportState": self.config.should_report_state,
@@ -614,18 +615,13 @@ class GoogleEntity:
state.domain, state.attributes.get(ATTR_DEVICE_CLASS)
),
}
# Add name and aliases.
# The entity's alias list is ordered: the first slot naturally serves
# as the primary name (set to the auto-generated full entity name by
# default), while the rest serve as alternative names (nicknames).
aliases = intent.async_get_entity_aliases(
self.hass, entity_entry, state=state, allow_empty=False
)
name, *aliases = aliases
name = entity_config.get(CONF_NAME) or name
device["name"] = {"name": name}
if (config_aliases := entity_config.get(CONF_ALIASES, [])) or aliases:
device["name"]["nicknames"] = [name, *config_aliases, *aliases]
# Add aliases
if (config_aliases := entity_config.get(CONF_ALIASES, [])) or (
entity_entry and entity_entry.aliases
):
device["name"]["nicknames"] = [name, *config_aliases]
if entity_entry:
device["name"]["nicknames"].extend(entity_entry.aliases)
# Add local SDK info if enabled
if self.config.is_local_sdk_active and self.should_expose_local():

View File

@@ -239,9 +239,6 @@ def _login_classic_api(
return login_response
V1_DEVICE_TYPES: dict[int, str] = {5: "sph", 7: "min"}
def get_device_list_v1(
api, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
@@ -263,17 +260,18 @@ def get_device_list_v1(
f"API error during device list: {e.error_msg or str(e)} (Code: {e.error_code})"
) from e
devices = devices_dict.get("devices", [])
# Only MIN device (type = 7) support implemented in current V1 API
supported_devices = [
{
"deviceSn": device.get("device_sn", ""),
"deviceType": V1_DEVICE_TYPES[device.get("type")],
"deviceType": "min",
}
for device in devices
if device.get("type") in V1_DEVICE_TYPES
if device.get("type") == 7
]
for device in devices:
if device.get("type") not in V1_DEVICE_TYPES:
if device.get("type") != 7:
_LOGGER.warning(
"Device %s with type %s not supported in Open API V1, skipping",
device.get("device_sn", ""),
@@ -350,7 +348,7 @@ async def async_setup_entry(
hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
)
for device in devices
if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min", "sph"]
if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
}
# Perform the first refresh for the total coordinator

View File

@@ -167,36 +167,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
**storage_info_detail["storageDetailBean"],
**storage_energy_overview,
}
elif self.device_type == "sph":
try:
sph_detail = self.api.sph_detail(self.device_id)
sph_energy = self.api.sph_energy(self.device_id)
except growattServer.GrowattV1ApiError as err:
if err.error_code == V1_API_ERROR_NO_PRIVILEGE:
raise ConfigEntryAuthFailed(
f"Authentication failed for Growatt API: {err.error_msg or str(err)}"
) from err
raise UpdateFailed(f"Error fetching SPH device data: {err}") from err
combined = {**sph_detail, **sph_energy}
# Parse last update timestamp from sph_energy "time" field
time_str = sph_energy.get("time")
if time_str:
try:
parsed = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S")
combined["lastdataupdate"] = parsed.replace(
tzinfo=dt_util.get_default_time_zone()
)
except (ValueError, TypeError):
_LOGGER.debug(
"Could not parse SPH time field for %s: %r",
self.device_id,
time_str,
)
self.data = combined
_LOGGER.debug("sph_info for device %s: %r", self.device_id, self.data)
elif self.device_type == "mix":
mix_info = self.api.mix_info(self.device_id)
mix_totals = self.api.mix_totals(self.device_id, self.plant_id)
@@ -478,123 +448,3 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return "00:00"
else:
return f"{hour:02d}:{minute:02d}"
async def update_ac_charge_times(
self,
charge_power: int,
charge_stop_soc: int,
mains_enabled: bool,
periods: list[dict],
) -> None:
"""Update AC charge time periods for SPH device.
Args:
charge_power: Charge power limit (0-100 %)
charge_stop_soc: Stop charging at this SOC level (0-100 %)
mains_enabled: Whether AC (mains) charging is enabled
periods: List of up to 3 dicts with keys start_time, end_time, enabled
"""
if self.api_version != "v1":
raise ServiceValidationError(
"Updating AC charge times requires token authentication"
)
try:
await self.hass.async_add_executor_job(
self.api.sph_write_ac_charge_times,
self.device_id,
charge_power,
charge_stop_soc,
mains_enabled,
periods,
)
except growattServer.GrowattV1ApiError as err:
raise HomeAssistantError(
f"API error updating AC charge times: {err}"
) from err
if self.data:
self.data["chargePowerCommand"] = charge_power
self.data["wchargeSOCLowLimit"] = charge_stop_soc
self.data["acChargeEnable"] = 1 if mains_enabled else 0
for i, period in enumerate(periods, 1):
self.data[f"forcedChargeTimeStart{i}"] = period["start_time"].strftime(
"%H:%M"
)
self.data[f"forcedChargeTimeStop{i}"] = period["end_time"].strftime(
"%H:%M"
)
self.data[f"forcedChargeStopSwitch{i}"] = (
1 if period.get("enabled", False) else 0
)
self.async_set_updated_data(self.data)
async def update_ac_discharge_times(
self,
discharge_power: int,
discharge_stop_soc: int,
periods: list[dict],
) -> None:
"""Update AC discharge time periods for SPH device.
Args:
discharge_power: Discharge power limit (0-100 %)
discharge_stop_soc: Stop discharging at this SOC level (0-100 %)
periods: List of up to 3 dicts with keys start_time, end_time, enabled
"""
if self.api_version != "v1":
raise ServiceValidationError(
"Updating AC discharge times requires token authentication"
)
try:
await self.hass.async_add_executor_job(
self.api.sph_write_ac_discharge_times,
self.device_id,
discharge_power,
discharge_stop_soc,
periods,
)
except growattServer.GrowattV1ApiError as err:
raise HomeAssistantError(
f"API error updating AC discharge times: {err}"
) from err
if self.data:
self.data["disChargePowerCommand"] = discharge_power
self.data["wdisChargeSOCLowLimit"] = discharge_stop_soc
for i, period in enumerate(periods, 1):
self.data[f"forcedDischargeTimeStart{i}"] = period[
"start_time"
].strftime("%H:%M")
self.data[f"forcedDischargeTimeStop{i}"] = period["end_time"].strftime(
"%H:%M"
)
self.data[f"forcedDischargeStopSwitch{i}"] = (
1 if period.get("enabled", False) else 0
)
self.async_set_updated_data(self.data)
async def read_ac_charge_times(self) -> dict:
"""Read AC charge time settings from SPH device cache."""
if self.api_version != "v1":
raise ServiceValidationError(
"Reading AC charge times requires token authentication"
)
if not self.data:
await self.async_refresh()
return self.api.sph_read_ac_charge_times(settings_data=self.data)
async def read_ac_discharge_times(self) -> dict:
"""Read AC discharge time settings from SPH device cache."""
if self.api_version != "v1":
raise ServiceValidationError(
"Reading AC discharge times requires token authentication"
)
if not self.data:
await self.async_refresh()
return self.api.sph_read_ac_discharge_times(settings_data=self.data)

View File

@@ -1,22 +1,10 @@
{
"services": {
"read_ac_charge_times": {
"service": "mdi:battery-clock-outline"
},
"read_ac_discharge_times": {
"service": "mdi:battery-clock-outline"
},
"read_time_segments": {
"service": "mdi:clock-outline"
},
"update_time_segment": {
"service": "mdi:clock-edit"
},
"write_ac_charge_times": {
"service": "mdi:battery-clock"
},
"write_ac_discharge_times": {
"service": "mdi:battery-clock"
}
}
}

View File

@@ -7,6 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["growattServer"],
"quality_scale": "silver",
"requirements": ["growattServer==1.9.0"]
}

View File

@@ -15,7 +15,6 @@ from ..coordinator import GrowattConfigEntry, GrowattCoordinator
from .inverter import INVERTER_SENSOR_TYPES
from .mix import MIX_SENSOR_TYPES
from .sensor_entity_description import GrowattSensorEntityDescription
from .sph import SPH_SENSOR_TYPES
from .storage import STORAGE_SENSOR_TYPES
from .tlx import TLX_SENSOR_TYPES
from .total import TOTAL_SENSOR_TYPES
@@ -58,8 +57,6 @@ async def async_setup_entry(
sensor_descriptions = list(STORAGE_SENSOR_TYPES)
elif device_coordinator.device_type == "mix":
sensor_descriptions = list(MIX_SENSOR_TYPES)
elif device_coordinator.device_type == "sph":
sensor_descriptions = list(SPH_SENSOR_TYPES)
else:
_LOGGER.debug(
"Device type %s was found but is not supported right now",

View File

@@ -1,291 +0,0 @@
"""Growatt Sensor definitions for the SPH type."""
from __future__ import annotations
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
from homeassistant.const import (
PERCENTAGE,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfFrequency,
UnitOfPower,
UnitOfTemperature,
)
from .sensor_entity_description import GrowattSensorEntityDescription
SPH_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
# Values from 'sph_detail' API call
GrowattSensorEntityDescription(
key="mix_statement_of_charge",
translation_key="mix_statement_of_charge",
api_key="bmsSOC",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
),
GrowattSensorEntityDescription(
key="mix_battery_voltage",
translation_key="mix_battery_voltage",
api_key="vbat",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
device_class=SensorDeviceClass.VOLTAGE,
),
GrowattSensorEntityDescription(
key="mix_pv1_voltage",
translation_key="mix_pv1_voltage",
api_key="vpv1",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
device_class=SensorDeviceClass.VOLTAGE,
),
GrowattSensorEntityDescription(
key="mix_pv2_voltage",
translation_key="mix_pv2_voltage",
api_key="vpv2",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
device_class=SensorDeviceClass.VOLTAGE,
),
GrowattSensorEntityDescription(
key="mix_grid_voltage",
translation_key="mix_grid_voltage",
api_key="vac1",
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
device_class=SensorDeviceClass.VOLTAGE,
),
GrowattSensorEntityDescription(
key="mix_battery_charge",
translation_key="mix_battery_charge",
api_key="pcharge1",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_battery_discharge_w",
translation_key="mix_battery_discharge_w",
api_key="pdischarge1",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_export_to_grid",
translation_key="mix_export_to_grid",
api_key="pacToGridTotal",
native_unit_of_measurement=UnitOfPower.KILO_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_import_from_grid",
translation_key="mix_import_from_grid",
api_key="pacToUserR",
native_unit_of_measurement=UnitOfPower.KILO_WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_grid_frequency",
translation_key="sph_grid_frequency",
api_key="fac",
native_unit_of_measurement=UnitOfFrequency.HERTZ,
device_class=SensorDeviceClass.FREQUENCY,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_temperature_1",
translation_key="sph_temperature_1",
api_key="temp1",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_temperature_2",
translation_key="sph_temperature_2",
api_key="temp2",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_temperature_3",
translation_key="sph_temperature_3",
api_key="temp3",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_temperature_4",
translation_key="sph_temperature_4",
api_key="temp4",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="sph_temperature_5",
translation_key="sph_temperature_5",
api_key="temp5",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
# Values from 'sph_energy' API call
GrowattSensorEntityDescription(
key="mix_wattage_pv_1",
translation_key="mix_wattage_pv_1",
api_key="ppv1",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_wattage_pv_2",
translation_key="mix_wattage_pv_2",
api_key="ppv2",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_wattage_pv_all",
translation_key="mix_wattage_pv_all",
api_key="ppv",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="mix_battery_charge_today",
translation_key="mix_battery_charge_today",
api_key="echarge1Today",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_battery_charge_lifetime",
translation_key="mix_battery_charge_lifetime",
api_key="echarge1Total",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
never_resets=True,
),
GrowattSensorEntityDescription(
key="mix_battery_discharge_today",
translation_key="mix_battery_discharge_today",
api_key="edischarge1Today",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_battery_discharge_lifetime",
translation_key="mix_battery_discharge_lifetime",
api_key="edischarge1Total",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
never_resets=True,
),
GrowattSensorEntityDescription(
key="mix_solar_generation_today",
translation_key="mix_solar_generation_today",
api_key="epvtoday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_solar_generation_lifetime",
translation_key="mix_solar_generation_lifetime",
api_key="epvTotal",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
never_resets=True,
),
GrowattSensorEntityDescription(
key="mix_system_production_today",
translation_key="mix_system_production_today",
api_key="esystemtoday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_self_consumption_today",
translation_key="mix_self_consumption_today",
api_key="eselfToday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_import_from_grid_today",
translation_key="mix_import_from_grid_today",
api_key="etoUserToday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_export_to_grid_today",
translation_key="mix_export_to_grid_today",
api_key="etoGridToday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_export_to_grid_lifetime",
translation_key="mix_export_to_grid_lifetime",
api_key="etogridTotal",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
never_resets=True,
),
GrowattSensorEntityDescription(
key="mix_load_consumption_today",
translation_key="mix_load_consumption_today",
api_key="elocalLoadToday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_load_consumption_lifetime",
translation_key="mix_load_consumption_lifetime",
api_key="elocalLoadTotal",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
never_resets=True,
),
GrowattSensorEntityDescription(
key="mix_load_consumption_battery_today",
translation_key="mix_load_consumption_battery_today",
api_key="echarge1",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="mix_load_consumption_solar_today",
translation_key="mix_load_consumption_solar_today",
api_key="eChargeToday",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
# Synthetic timestamp from 'time' field in sph_energy response
GrowattSensorEntityDescription(
key="mix_last_update",
translation_key="mix_last_update",
api_key="lastdataupdate",
device_class=SensorDeviceClass.TIMESTAMP,
),
)

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from datetime import datetime, time
from datetime import datetime
from typing import TYPE_CHECKING, Any
from homeassistant.config_entries import ConfigEntryState
@@ -21,77 +21,67 @@ if TYPE_CHECKING:
from .coordinator import GrowattCoordinator
def _get_coordinators(
hass: HomeAssistant, device_type: str
) -> dict[str, GrowattCoordinator]:
"""Get all coordinators of a given device type with V1 API."""
coordinators: dict[str, GrowattCoordinator] = {}
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.state != ConfigEntryState.LOADED:
continue
for coord in entry.runtime_data.devices.values():
if coord.device_type == device_type and coord.api_version == "v1":
coordinators[coord.device_id] = coord
return coordinators
def _get_coordinator(
hass: HomeAssistant, device_id: str, device_type: str
) -> GrowattCoordinator:
"""Get coordinator by device registry ID and device type."""
coordinators = _get_coordinators(hass, device_type)
if not coordinators:
raise ServiceValidationError(
f"No {device_type.upper()} devices with token authentication are configured. "
f"Services require {device_type.upper()} devices with V1 API access."
)
device_registry = dr.async_get(hass)
device_entry = device_registry.async_get(device_id)
if not device_entry:
raise ServiceValidationError(f"Device '{device_id}' not found")
serial_number = None
for identifier in device_entry.identifiers:
if identifier[0] == DOMAIN:
serial_number = identifier[1]
break
if not serial_number:
raise ServiceValidationError(f"Device '{device_id}' is not a Growatt device")
if serial_number not in coordinators:
raise ServiceValidationError(
f"{device_type.upper()} device '{serial_number}' not found or not configured for services"
)
return coordinators[serial_number]
def _parse_time_str(time_str: str, field_name: str) -> time:
"""Parse a time string (HH:MM or HH:MM:SS) to a datetime.time object."""
parts = time_str.split(":")
if len(parts) not in (2, 3):
raise ServiceValidationError(
f"{field_name} must be in HH:MM or HH:MM:SS format"
)
try:
return datetime.strptime(f"{parts[0]}:{parts[1]}", "%H:%M").time()
except (ValueError, IndexError) as err:
raise ServiceValidationError(
f"{field_name} must be in HH:MM or HH:MM:SS format"
) from err
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register services for Growatt Server integration."""
def get_min_coordinators() -> dict[str, GrowattCoordinator]:
"""Get all MIN coordinators with V1 API from loaded config entries."""
min_coordinators: dict[str, GrowattCoordinator] = {}
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.state != ConfigEntryState.LOADED:
continue
# Add MIN coordinators from this entry
for coord in entry.runtime_data.devices.values():
if coord.device_type == "min" and coord.api_version == "v1":
min_coordinators[coord.device_id] = coord
return min_coordinators
def get_coordinator(device_id: str) -> GrowattCoordinator:
"""Get coordinator by device_id.
Args:
device_id: Device registry ID (not serial number)
"""
# Get current coordinators (they may have changed since service registration)
min_coordinators = get_min_coordinators()
if not min_coordinators:
raise ServiceValidationError(
"No MIN devices with token authentication are configured. "
"Services require MIN devices with V1 API access."
)
# Device registry ID provided - map to serial number
device_registry = dr.async_get(hass)
device_entry = device_registry.async_get(device_id)
if not device_entry:
raise ServiceValidationError(f"Device '{device_id}' not found")
# Extract serial number from device identifiers
serial_number = None
for identifier in device_entry.identifiers:
if identifier[0] == DOMAIN:
serial_number = identifier[1]
break
if not serial_number:
raise ServiceValidationError(
f"Device '{device_id}' is not a Growatt device"
)
# Find coordinator by serial number
if serial_number not in min_coordinators:
raise ServiceValidationError(
f"MIN device '{serial_number}' not found or not configured for services"
)
return min_coordinators[serial_number]
async def handle_update_time_segment(call: ServiceCall) -> None:
"""Handle update_time_segment service call."""
segment_id: int = int(call.data["segment_id"])
@@ -101,11 +91,13 @@ def async_setup_services(hass: HomeAssistant) -> None:
enabled: bool = call.data["enabled"]
device_id: str = call.data["device_id"]
# Validate segment_id range
if not 1 <= segment_id <= 9:
raise ServiceValidationError(
f"segment_id must be between 1 and 9, got {segment_id}"
)
# Validate and convert batt_mode string to integer
valid_modes = {
"load_first": BATT_MODE_LOAD_FIRST,
"battery_first": BATT_MODE_BATTERY_FIRST,
@@ -117,121 +109,50 @@ def async_setup_services(hass: HomeAssistant) -> None:
)
batt_mode: int = valid_modes[batt_mode_str]
start_time = _parse_time_str(start_time_str, "start_time")
end_time = _parse_time_str(end_time_str, "end_time")
# Convert time strings to datetime.time objects
# UI time selector sends HH:MM:SS, but we only need HH:MM (strip seconds)
try:
# Take only HH:MM part (ignore seconds if present)
start_parts = start_time_str.split(":")
start_time_hhmm = f"{start_parts[0]}:{start_parts[1]}"
start_time = datetime.strptime(start_time_hhmm, "%H:%M").time()
except (ValueError, IndexError) as err:
raise ServiceValidationError(
"start_time must be in HH:MM or HH:MM:SS format"
) from err
try:
# Take only HH:MM part (ignore seconds if present)
end_parts = end_time_str.split(":")
end_time_hhmm = f"{end_parts[0]}:{end_parts[1]}"
end_time = datetime.strptime(end_time_hhmm, "%H:%M").time()
except (ValueError, IndexError) as err:
raise ServiceValidationError(
"end_time must be in HH:MM or HH:MM:SS format"
) from err
# Get the appropriate MIN coordinator
coordinator: GrowattCoordinator = get_coordinator(device_id)
coordinator: GrowattCoordinator = _get_coordinator(hass, device_id, "min")
await coordinator.update_time_segment(
segment_id, batt_mode, start_time, end_time, enabled
segment_id,
batt_mode,
start_time,
end_time,
enabled,
)
async def handle_read_time_segments(call: ServiceCall) -> dict[str, Any]:
"""Handle read_time_segments service call."""
coordinator: GrowattCoordinator = _get_coordinator(
hass, call.data["device_id"], "min"
)
device_id: str = call.data["device_id"]
# Get the appropriate MIN coordinator
coordinator: GrowattCoordinator = get_coordinator(device_id)
time_segments: list[dict[str, Any]] = await coordinator.read_time_segments()
return {"time_segments": time_segments}
async def handle_write_ac_charge_times(call: ServiceCall) -> None:
"""Handle write_ac_charge_times service call for SPH devices."""
coordinator: GrowattCoordinator = _get_coordinator(
hass, call.data["device_id"], "sph"
)
# Read current settings first — the SPH API requires all 3 periods in
# every write call. Any period not supplied by the caller is filled in
# from the cache so existing settings are not overwritten with zeros.
current = await coordinator.read_ac_charge_times()
charge_power: int = int(call.data.get("charge_power", current["charge_power"]))
charge_stop_soc: int = int(
call.data.get("charge_stop_soc", current["charge_stop_soc"])
)
mains_enabled: bool = call.data.get("mains_enabled", current["mains_enabled"])
if not 0 <= charge_power <= 100:
raise ServiceValidationError(
f"charge_power must be between 0 and 100, got {charge_power}"
)
if not 0 <= charge_stop_soc <= 100:
raise ServiceValidationError(
f"charge_stop_soc must be between 0 and 100, got {charge_stop_soc}"
)
periods = []
for i in range(1, 4):
cached = current["periods"][i - 1]
start = _parse_time_str(
call.data.get(f"period_{i}_start", cached["start_time"]),
f"period_{i}_start",
)
end = _parse_time_str(
call.data.get(f"period_{i}_end", cached["end_time"]),
f"period_{i}_end",
)
enabled: bool = call.data.get(f"period_{i}_enabled", cached["enabled"])
periods.append({"start_time": start, "end_time": end, "enabled": enabled})
await coordinator.update_ac_charge_times(
charge_power, charge_stop_soc, mains_enabled, periods
)
async def handle_write_ac_discharge_times(call: ServiceCall) -> None:
"""Handle write_ac_discharge_times service call for SPH devices."""
coordinator: GrowattCoordinator = _get_coordinator(
hass, call.data["device_id"], "sph"
)
# Read current settings first — same read-merge-write pattern as charge.
current = await coordinator.read_ac_discharge_times()
discharge_power: int = int(
call.data.get("discharge_power", current["discharge_power"])
)
discharge_stop_soc: int = int(
call.data.get("discharge_stop_soc", current["discharge_stop_soc"])
)
if not 0 <= discharge_power <= 100:
raise ServiceValidationError(
f"discharge_power must be between 0 and 100, got {discharge_power}"
)
if not 0 <= discharge_stop_soc <= 100:
raise ServiceValidationError(
f"discharge_stop_soc must be between 0 and 100, got {discharge_stop_soc}"
)
periods = []
for i in range(1, 4):
cached = current["periods"][i - 1]
start = _parse_time_str(
call.data.get(f"period_{i}_start", cached["start_time"]),
f"period_{i}_start",
)
end = _parse_time_str(
call.data.get(f"period_{i}_end", cached["end_time"]),
f"period_{i}_end",
)
enabled: bool = call.data.get(f"period_{i}_enabled", cached["enabled"])
periods.append({"start_time": start, "end_time": end, "enabled": enabled})
await coordinator.update_ac_discharge_times(
discharge_power, discharge_stop_soc, periods
)
async def handle_read_ac_charge_times(call: ServiceCall) -> dict[str, Any]:
"""Handle read_ac_charge_times service call for SPH devices."""
coordinator: GrowattCoordinator = _get_coordinator(
hass, call.data["device_id"], "sph"
)
return await coordinator.read_ac_charge_times()
async def handle_read_ac_discharge_times(call: ServiceCall) -> dict[str, Any]:
"""Handle read_ac_discharge_times service call for SPH devices."""
coordinator: GrowattCoordinator = _get_coordinator(
hass, call.data["device_id"], "sph"
)
return await coordinator.read_ac_discharge_times()
# Register services without schema - services.yaml will provide UI definition
# Schema validation happens in the handler functions
hass.services.async_register(
@@ -247,31 +168,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
handle_read_time_segments,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
"write_ac_charge_times",
handle_write_ac_charge_times,
supports_response=SupportsResponse.NONE,
)
hass.services.async_register(
DOMAIN,
"write_ac_discharge_times",
handle_write_ac_discharge_times,
supports_response=SupportsResponse.NONE,
)
hass.services.async_register(
DOMAIN,
"read_ac_charge_times",
handle_read_ac_charge_times,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
"read_ac_discharge_times",
handle_read_ac_discharge_times,
supports_response=SupportsResponse.ONLY,
)

View File

@@ -48,162 +48,3 @@ read_time_segments:
selector:
device:
integration: growatt_server
write_ac_charge_times:
fields:
device_id:
required: true
selector:
device:
integration: growatt_server
charge_power:
required: false
example: 100
selector:
number:
min: 0
max: 100
mode: slider
charge_stop_soc:
required: false
example: 100
selector:
number:
min: 0
max: 100
mode: slider
mains_enabled:
required: false
example: true
selector:
boolean:
period_1_start:
required: false
example: "00:00"
selector:
time:
period_1_end:
required: false
example: "00:00"
selector:
time:
period_1_enabled:
required: false
example: false
selector:
boolean:
period_2_start:
required: false
example: "00:00"
selector:
time:
period_2_end:
required: false
example: "00:00"
selector:
time:
period_2_enabled:
required: false
example: false
selector:
boolean:
period_3_start:
required: false
example: "00:00"
selector:
time:
period_3_end:
required: false
example: "00:00"
selector:
time:
period_3_enabled:
required: false
example: false
selector:
boolean:
write_ac_discharge_times:
fields:
device_id:
required: true
selector:
device:
integration: growatt_server
discharge_power:
required: false
example: 100
selector:
number:
min: 0
max: 100
mode: slider
discharge_stop_soc:
required: false
example: 20
selector:
number:
min: 0
max: 100
mode: slider
period_1_start:
required: false
example: "00:00"
selector:
time:
period_1_end:
required: false
example: "00:00"
selector:
time:
period_1_enabled:
required: false
example: false
selector:
boolean:
period_2_start:
required: false
example: "00:00"
selector:
time:
period_2_end:
required: false
example: "00:00"
selector:
time:
period_2_enabled:
required: false
example: false
selector:
boolean:
period_3_start:
required: false
example: "00:00"
selector:
time:
period_3_end:
required: false
example: "00:00"
selector:
time:
period_3_enabled:
required: false
example: false
selector:
boolean:
read_ac_charge_times:
fields:
device_id:
required: true
selector:
device:
integration: growatt_server
read_ac_discharge_times:
fields:
device_id:
required: true
selector:
device:
integration: growatt_server

View File

@@ -58,14 +58,14 @@
"region": "[%key:component::growatt_server::config::step::password_auth::data_description::region%]",
"token": "The API token for your Growatt account. You can generate one via the Growatt web portal or ShinePhone app."
},
"description": "Token authentication is only supported for MIN/SPH devices. For other device types, please use username/password authentication.",
"description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
"title": "Enter your API token"
},
"user": {
"description": "Note: Token authentication is currently only supported for MIN/SPH devices. For other device types, please use username/password authentication.",
"description": "Note: Token authentication is currently only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
"menu_options": {
"password_auth": "Username/password",
"token_auth": "API token (MIN/SPH only)"
"token_auth": "API token (MIN/TLX only)"
},
"title": "Choose authentication method"
}
@@ -243,24 +243,6 @@
"mix_wattage_pv_all": {
"name": "All PV wattage"
},
"sph_grid_frequency": {
"name": "AC frequency"
},
"sph_temperature_1": {
"name": "Temperature 1"
},
"sph_temperature_2": {
"name": "Temperature 2"
},
"sph_temperature_3": {
"name": "Temperature 3"
},
"sph_temperature_4": {
"name": "Temperature 4"
},
"sph_temperature_5": {
"name": "Temperature 5"
},
"storage_ac_input_frequency_out": {
"name": "AC input frequency"
},
@@ -594,26 +576,6 @@
}
},
"services": {
"read_ac_charge_times": {
"description": "Read AC charge time periods from an SPH device.",
"fields": {
"device_id": {
"description": "The Growatt SPH device to read from.",
"name": "Device"
}
},
"name": "Read AC charge times"
},
"read_ac_discharge_times": {
"description": "Read AC discharge time periods from an SPH device.",
"fields": {
"device_id": {
"description": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::description%]",
"name": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::name%]"
}
},
"name": "Read AC discharge times"
},
"read_time_segments": {
"description": "Read all time segments from a supported inverter.",
"fields": {
@@ -653,118 +615,6 @@
}
},
"name": "Update time segment"
},
"write_ac_charge_times": {
"description": "Write AC charge time periods to an SPH device.",
"fields": {
"charge_power": {
"description": "Charge power limit (%).",
"name": "Charge power"
},
"charge_stop_soc": {
"description": "Stop charging at this state of charge (%).",
"name": "Charge stop SOC"
},
"device_id": {
"description": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::description%]",
"name": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::name%]"
},
"mains_enabled": {
"description": "Enable AC (mains) charging.",
"name": "Mains charging enabled"
},
"period_1_enabled": {
"description": "Enable time period 1.",
"name": "Period 1 enabled"
},
"period_1_end": {
"description": "End time for period 1 (HH:MM or HH:MM:SS).",
"name": "Period 1 end"
},
"period_1_start": {
"description": "Start time for period 1 (HH:MM or HH:MM:SS).",
"name": "Period 1 start"
},
"period_2_enabled": {
"description": "Enable time period 2.",
"name": "Period 2 enabled"
},
"period_2_end": {
"description": "End time for period 2 (HH:MM or HH:MM:SS).",
"name": "Period 2 end"
},
"period_2_start": {
"description": "Start time for period 2 (HH:MM or HH:MM:SS).",
"name": "Period 2 start"
},
"period_3_enabled": {
"description": "Enable time period 3.",
"name": "Period 3 enabled"
},
"period_3_end": {
"description": "End time for period 3 (HH:MM or HH:MM:SS).",
"name": "Period 3 end"
},
"period_3_start": {
"description": "Start time for period 3 (HH:MM or HH:MM:SS).",
"name": "Period 3 start"
}
},
"name": "Write AC charge times"
},
"write_ac_discharge_times": {
"description": "Write AC discharge time periods to an SPH device.",
"fields": {
"device_id": {
"description": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::description%]",
"name": "[%key:component::growatt_server::services::read_ac_charge_times::fields::device_id::name%]"
},
"discharge_power": {
"description": "Discharge power limit (%).",
"name": "Discharge power"
},
"discharge_stop_soc": {
"description": "Stop discharging at this state of charge (%).",
"name": "Discharge stop SOC"
},
"period_1_enabled": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_enabled::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_enabled::name%]"
},
"period_1_end": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_end::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_end::name%]"
},
"period_1_start": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_start::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_1_start::name%]"
},
"period_2_enabled": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_enabled::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_enabled::name%]"
},
"period_2_end": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_end::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_end::name%]"
},
"period_2_start": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_start::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_2_start::name%]"
},
"period_3_enabled": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_enabled::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_enabled::name%]"
},
"period_3_end": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_end::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_end::name%]"
},
"period_3_start": {
"description": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_start::description%]",
"name": "[%key:component::growatt_server::services::write_ac_charge_times::fields::period_3_start::name%]"
}
},
"name": "Write AC discharge times"
}
},
"title": "Growatt Server"

View File

@@ -89,18 +89,18 @@
"step": {
"advanced": {
"data": {
"api_key": "API token",
"api_key": "API Token",
"api_user": "User ID",
"url": "[%key:common::config_flow::data::url%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"api_key": "API token of the Habitica account",
"api_key": "API Token of the Habitica account",
"api_user": "User ID of your Habitica account",
"url": "URL of the Habitica installation to connect to. Defaults to `{default_url}`",
"verify_ssl": "Enable SSL certificate verification for secure connections. Disable only if connecting to a Habitica instance using a self-signed certificate"
},
"description": "You can retrieve your 'User ID' and 'API token' from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to",
"description": "You can retrieve your `User ID` and `API Token` from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to",
"title": "[%key:component::habitica::config::step::user::menu_options::advanced%]"
},
"login": {
@@ -126,7 +126,7 @@
"api_key": "[%key:component::habitica::config::step::advanced::data_description::api_key%]"
},
"description": "Enter your new API token below. You can find it in Habitica under 'Settings -> Site Data'",
"name": "Re-authorize via API token"
"name": "Re-authorize via API Token"
},
"reauth_login": {
"data": {

View File

@@ -9,21 +9,10 @@ import logging
import os
import re
import struct
from typing import Any, NamedTuple, cast
from typing import Any, NamedTuple
from aiohasupervisor import SupervisorError
from aiohasupervisor.models import (
GreenOptions,
HomeAssistantInfo,
HostInfo,
InstalledAddon,
NetworkInfo,
OSInfo,
RootInfo,
StoreInfo,
SupervisorInfo,
YellowOptions,
)
from aiohasupervisor.models import GreenOptions, YellowOptions # noqa: F401
import voluptuous as vol
from homeassistant.auth.const import GROUP_ID_ADMIN
@@ -76,7 +65,7 @@ from . import ( # noqa: F401
system_health,
update,
)
from .addon_manager import AddonError, AddonInfo, AddonManager, AddonState
from .addon_manager import AddonError, AddonInfo, AddonManager, AddonState # noqa: F401
from .addon_panel import async_setup_addon_panel
from .auth import async_setup_auth_view
from .config import HassioConfig
@@ -93,9 +82,7 @@ from .const import (
ATTR_INPUT,
ATTR_LOCATION,
ATTR_PASSWORD,
ATTR_REPOSITORIES,
ATTR_SLUG,
DATA_ADDONS_LIST,
DATA_COMPONENT,
DATA_CONFIG_STORE,
DATA_CORE_INFO,
@@ -113,21 +100,18 @@ from .const import (
from .coordinator import (
HassioDataUpdateCoordinator,
get_addons_info,
get_addons_list,
get_addons_stats,
get_core_info,
get_core_stats,
get_host_info,
get_addons_stats, # noqa: F401
get_core_info, # noqa: F401
get_core_stats, # noqa: F401
get_host_info, # noqa: F401
get_info,
get_issues_info,
get_network_info,
get_issues_info, # noqa: F401
get_os_info,
get_store,
get_supervisor_info,
get_supervisor_stats,
get_supervisor_info, # noqa: F401
get_supervisor_stats, # noqa: F401
)
from .discovery import async_setup_discovery_view
from .handler import (
from .handler import ( # noqa: F401
HassIO,
HassioAPIError,
async_update_diagnostics,
@@ -138,35 +122,6 @@ from .ingress import async_setup_ingress_view
from .issues import SupervisorIssues
from .websocket_api import async_load_websocket_api
# Expose the future safe name now so integrations can use it
# All references to addons will eventually be refactored and deprecated
get_apps_list = get_addons_list
__all__ = [
"AddonError",
"AddonInfo",
"AddonManager",
"AddonState",
"GreenOptions",
"SupervisorError",
"YellowOptions",
"async_update_diagnostics",
"get_addons_info",
"get_addons_list",
"get_addons_stats",
"get_apps_list",
"get_core_info",
"get_core_stats",
"get_host_info",
"get_info",
"get_issues_info",
"get_network_info",
"get_os_info",
"get_store",
"get_supervisor_client",
"get_supervisor_info",
"get_supervisor_stats",
]
_LOGGER = logging.getLogger(__name__)
@@ -549,55 +504,27 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
try:
(
root_info,
host_info,
hass.data[DATA_INFO],
hass.data[DATA_HOST_INFO],
store_info,
homeassistant_info,
supervisor_info,
os_info,
network_info,
addons_list,
) = cast(
tuple[
RootInfo,
HostInfo,
StoreInfo,
HomeAssistantInfo,
SupervisorInfo,
OSInfo,
NetworkInfo,
list[InstalledAddon],
],
await asyncio.gather(
create_eager_task(supervisor_client.info()),
create_eager_task(supervisor_client.host.info()),
create_eager_task(supervisor_client.store.info()),
create_eager_task(supervisor_client.homeassistant.info()),
create_eager_task(supervisor_client.supervisor.info()),
create_eager_task(supervisor_client.os.info()),
create_eager_task(supervisor_client.network.info()),
create_eager_task(supervisor_client.addons.list()),
),
hass.data[DATA_CORE_INFO],
hass.data[DATA_SUPERVISOR_INFO],
hass.data[DATA_OS_INFO],
hass.data[DATA_NETWORK_INFO],
) = await asyncio.gather(
create_eager_task(hassio.get_info()),
create_eager_task(hassio.get_host_info()),
create_eager_task(supervisor_client.store.info()),
create_eager_task(hassio.get_core_info()),
create_eager_task(hassio.get_supervisor_info()),
create_eager_task(hassio.get_os_info()),
create_eager_task(hassio.get_network_info()),
)
except SupervisorError as err:
except HassioAPIError as err:
_LOGGER.warning("Can't read Supervisor data: %s", err)
else:
hass.data[DATA_INFO] = root_info.to_dict()
hass.data[DATA_HOST_INFO] = host_info.to_dict()
hass.data[DATA_STORE] = store_info.to_dict()
hass.data[DATA_CORE_INFO] = homeassistant_info.to_dict()
hass.data[DATA_SUPERVISOR_INFO] = supervisor_info.to_dict()
hass.data[DATA_OS_INFO] = os_info.to_dict()
hass.data[DATA_NETWORK_INFO] = network_info.to_dict()
hass.data[DATA_ADDONS_LIST] = [addon.to_dict() for addon in addons_list]
# Deprecated 2026.4.0: Folding repositories and addons.list results into supervisor_info for compatibility
# Can drop this after removal period
hass.data[DATA_SUPERVISOR_INFO]["repositories"] = hass.data[DATA_STORE][
ATTR_REPOSITORIES
]
hass.data[DATA_SUPERVISOR_INFO]["addons"] = hass.data[DATA_ADDONS_LIST]
async_call_later(
hass,

View File

@@ -204,17 +204,8 @@ class SupervisorBackupAgent(BackupAgent):
location={self.location},
filename=PurePath(suggested_backup_filename(backup)),
)
async def stream_with_progress() -> AsyncIterator[bytes]:
"""Wrap stream to track upload progress."""
bytes_uploaded = 0
async for chunk in stream:
bytes_uploaded += len(chunk)
on_progress(bytes_uploaded=bytes_uploaded)
yield chunk
await self._client.backups.upload_backup(
stream_with_progress(),
stream,
upload_options,
)

View File

@@ -93,7 +93,6 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_ADDONS_LIST = "hassio_addons_list"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
ATTR_AUTO_UPDATE = "auto_update"
@@ -107,7 +106,6 @@ ATTR_STATE = "state"
ATTR_STARTED = "started"
ATTR_URL = "url"
ATTR_REPOSITORY = "repository"
ATTR_REPOSITORIES = "repositories"
DATA_KEY_ADDONS = "addons"
DATA_KEY_OS = "os"

View File

@@ -4,20 +4,13 @@ from __future__ import annotations
import asyncio
from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
import logging
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import (
AddonState,
CIFSMountResponse,
InstalledAddon,
NFSMountResponse,
StoreInfo,
)
from aiohasupervisor.models.base import ResponseData
from aiohasupervisor.models import StoreInfo
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
@@ -30,16 +23,16 @@ from homeassistant.loader import bind_hass
from .const import (
ATTR_AUTO_UPDATE,
ATTR_REPOSITORIES,
ATTR_REPOSITORY,
ATTR_SLUG,
ATTR_STARTED,
ATTR_STATE,
ATTR_URL,
ATTR_VERSION,
CONTAINER_INFO,
CONTAINER_STATS,
CORE_CONTAINER,
DATA_ADDONS_INFO,
DATA_ADDONS_LIST,
DATA_ADDONS_STATS,
DATA_COMPONENT,
DATA_CORE_INFO,
@@ -64,7 +57,7 @@ from .const import (
SUPERVISOR_CONTAINER,
SupervisorEntityModel,
)
from .handler import get_supervisor_client
from .handler import HassioAPIError, get_supervisor_client
from .jobs import SupervisorJobs
if TYPE_CHECKING:
@@ -125,7 +118,7 @@ def get_network_info(hass: HomeAssistant) -> dict[str, Any] | None:
@callback
@bind_hass
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None] | None:
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any]] | None:
"""Return Addons info.
Async friendly.
@@ -133,18 +126,9 @@ def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None] | N
return hass.data.get(DATA_ADDONS_INFO)
@callback
def get_addons_list(hass: HomeAssistant) -> list[dict[str, Any]] | None:
"""Return list of installed addons and subset of details for each.
Async friendly.
"""
return hass.data.get(DATA_ADDONS_LIST)
@callback
@bind_hass
def get_addons_stats(hass: HomeAssistant) -> dict[str, dict[str, Any] | None]:
def get_addons_stats(hass: HomeAssistant) -> dict[str, Any]:
"""Return Addons stats.
Async friendly.
@@ -357,7 +341,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
try:
await self.force_data_refresh(is_first_update)
except SupervisorError as err:
except HassioAPIError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
new_data: dict[str, Any] = {}
@@ -366,7 +350,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
addons_stats = get_addons_stats(self.hass)
store_data = get_store(self.hass)
mounts_info = await self.supervisor_client.mounts.info()
addons_list = get_addons_list(self.hass) or []
if store_data:
repositories = {
@@ -377,17 +360,17 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
repositories = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
addon[ATTR_SLUG]: {
**addon,
**(addons_stats.get(slug) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(slug) or {}).get(
**((addons_stats or {}).get(addon[ATTR_SLUG]) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(addon[ATTR_SLUG]) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
addon.get(ATTR_REPOSITORY), addon.get(ATTR_REPOSITORY, "")
),
}
for addon in addons_list
for addon in supervisor_info.get("addons", [])
}
if self.is_hass_os:
new_data[DATA_KEY_OS] = get_os_info(self.hass)
@@ -479,48 +462,32 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
container_updates = self._container_updates
data = self.hass.data
client = self.supervisor_client
updates: dict[str, Awaitable[ResponseData]] = {
DATA_INFO: client.info(),
DATA_CORE_INFO: client.homeassistant.info(),
DATA_SUPERVISOR_INFO: client.supervisor.info(),
DATA_OS_INFO: client.os.info(),
DATA_STORE: client.store.info(),
hassio = self.hassio
updates = {
DATA_INFO: hassio.get_info(),
DATA_CORE_INFO: hassio.get_core_info(),
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
DATA_OS_INFO: hassio.get_os_info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = client.homeassistant.stats()
updates[DATA_CORE_STATS] = hassio.get_core_stats()
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()
updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
# Pull off addons.list results for further processing before caching
addons_list, *results = await asyncio.gather(
client.addons.list(), *updates.values()
)
for key, result in zip(updates, cast(list[ResponseData], results), strict=True):
data[key] = result.to_dict()
installed_addons = cast(list[InstalledAddon], addons_list)
data[DATA_ADDONS_LIST] = [addon.to_dict() for addon in installed_addons]
# Deprecated 2026.4.0: Folding repositories and addons.list results into supervisor_info for compatibility
# Can drop this after removal period
data[DATA_SUPERVISOR_INFO].update(
{
"repositories": data[DATA_STORE][ATTR_REPOSITORIES],
"addons": [addon.to_dict() for addon in installed_addons],
}
)
all_addons = {addon.slug for addon in installed_addons}
started_addons = {
addon.slug
for addon in installed_addons
if addon.state in {AddonState.STARTED, AddonState.STARTUP}
}
results = await asyncio.gather(*updates.values())
for key, result in zip(updates, results, strict=False):
data[key] = result
_addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])
all_addons: list[str] = []
started_addons: list[str] = []
for addon in _addon_data:
slug = addon[ATTR_SLUG]
all_addons.append(slug)
if addon[ATTR_STATE] == ATTR_STARTED:
started_addons.append(slug)
#
# Update addon info if its the first update or
# Update add-on info if it's the first update or
# there is at least one entity that needs the data.
#
# When entities are added they call async_enable_container_updates
@@ -547,12 +514,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
),
):
container_data: dict[str, Any] = data.setdefault(data_key, {})
# Clean up cache
for slug in container_data.keys() - wanted_addons:
del container_data[slug]
# Update cache from API
container_data.update(
dict(
await asyncio.gather(
@@ -579,7 +540,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
return (slug, stats.to_dict())
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Return the info for an addon."""
"""Return the info for an add-on."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:

View File

@@ -87,6 +87,70 @@ class HassIO:
"""Return base url for Supervisor."""
return self._base_url
@api_data
def get_info(self) -> Coroutine:
"""Return generic Supervisor information.
This method returns a coroutine.
"""
return self.send_command("/info", method="get")
@api_data
def get_host_info(self) -> Coroutine:
"""Return data for Host.
This method returns a coroutine.
"""
return self.send_command("/host/info", method="get")
@api_data
def get_os_info(self) -> Coroutine:
"""Return data for the OS.
This method returns a coroutine.
"""
return self.send_command("/os/info", method="get")
@api_data
def get_core_info(self) -> Coroutine:
"""Return data for Home Asssistant Core.
This method returns a coroutine.
"""
return self.send_command("/core/info", method="get")
@api_data
def get_supervisor_info(self) -> Coroutine:
"""Return data for the Supervisor.
This method returns a coroutine.
"""
return self.send_command("/supervisor/info", method="get")
@api_data
def get_network_info(self) -> Coroutine:
"""Return data for the Host Network.
This method returns a coroutine.
"""
return self.send_command("/network/info", method="get")
@api_data
def get_core_stats(self) -> Coroutine:
"""Return stats for the core.
This method returns a coroutine.
"""
return self.send_command("/core/stats", method="get")
@api_data
def get_supervisor_stats(self) -> Coroutine:
"""Return stats for the supervisor.
This method returns a coroutine.
"""
return self.send_command("/supervisor/stats", method="get")
@api_data
def get_ingress_panels(self) -> Coroutine:
"""Return data for Add-on ingress panels.

View File

@@ -17,7 +17,6 @@ from aiohasupervisor.models import (
UnsupportedReason,
)
from homeassistant.const import ATTR_NAME
from homeassistant.core import HassJob, HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_call_later
@@ -31,7 +30,6 @@ from .const import (
ADDONS_COORDINATOR,
ATTR_DATA,
ATTR_HEALTHY,
ATTR_SLUG,
ATTR_STARTUP,
ATTR_SUPPORTED,
ATTR_UNHEALTHY_REASONS,
@@ -61,7 +59,7 @@ from .const import (
STARTUP_COMPLETE,
UPDATE_KEY_SUPERVISOR,
)
from .coordinator import HassioDataUpdateCoordinator, get_addons_list, get_host_info
from .coordinator import HassioDataUpdateCoordinator, get_addons_info, get_host_info
from .handler import HassIO, get_supervisor_client
ISSUE_KEY_UNHEALTHY = "unhealthy"
@@ -267,18 +265,23 @@ class SupervisorIssues:
placeholders[PLACEHOLDER_KEY_ADDON_URL] = (
f"/hassio/addon/{issue.reference}"
)
addons_list = get_addons_list(self._hass) or []
placeholders[PLACEHOLDER_KEY_ADDON] = issue.reference
for addon in addons_list:
if addon[ATTR_SLUG] == issue.reference:
placeholders[PLACEHOLDER_KEY_ADDON] = addon[ATTR_NAME]
break
addons = get_addons_info(self._hass)
if addons and issue.reference in addons:
placeholders[PLACEHOLDER_KEY_ADDON] = addons[issue.reference][
"name"
]
else:
placeholders[PLACEHOLDER_KEY_ADDON] = issue.reference
elif issue.key == ISSUE_KEY_SYSTEM_FREE_SPACE:
host_info = get_host_info(self._hass)
if host_info and "disk_free" in host_info:
if (
host_info
and "data" in host_info
and "disk_free" in host_info["data"]
):
placeholders[PLACEHOLDER_KEY_FREE_SPACE] = str(
host_info["disk_free"]
host_info["data"]["disk_free"]
)
else:
placeholders[PLACEHOLDER_KEY_FREE_SPACE] = "<2"

View File

@@ -11,13 +11,11 @@ from aiohasupervisor.models import ContextType
import voluptuous as vol
from homeassistant.components.repairs import RepairsFlow
from homeassistant.const import ATTR_NAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from . import get_addons_list, get_issues_info
from . import get_addons_info, get_issues_info
from .const import (
ATTR_SLUG,
EXTRA_PLACEHOLDERS,
ISSUE_KEY_ADDON_BOOT_FAIL,
ISSUE_KEY_ADDON_DEPRECATED,
@@ -156,7 +154,7 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow):
placeholders = {PLACEHOLDER_KEY_COMPONENTS: ""}
supervisor_issues = get_issues_info(self.hass)
if supervisor_issues and self.issue:
addons_list = get_addons_list(self.hass) or []
addons = get_addons_info(self.hass) or {}
components: list[str] = []
for issue in supervisor_issues.issues:
if issue.key == self.issue.key or issue.type != self.issue.type:
@@ -168,9 +166,9 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow):
components.append(
next(
(
addon[ATTR_NAME]
for addon in addons_list
if addon[ATTR_SLUG] == issue.reference
info["name"]
for slug, info in addons.items()
if slug == issue.reference
),
issue.reference or "",
)
@@ -189,12 +187,13 @@ class AddonIssueRepairFlow(SupervisorIssueRepairFlow):
"""Get description placeholders for steps."""
placeholders: dict[str, str] = super().description_placeholders or {}
if self.issue and self.issue.reference:
addons_list = get_addons_list(self.hass) or []
placeholders[PLACEHOLDER_KEY_ADDON] = self.issue.reference
for addon in addons_list:
if addon[ATTR_SLUG] == self.issue.reference:
placeholders[PLACEHOLDER_KEY_ADDON] = addon[ATTR_NAME]
break
addons = get_addons_info(self.hass)
if addons and self.issue.reference in addons:
placeholders[PLACEHOLDER_KEY_ADDON] = addons[self.issue.reference][
"name"
]
else:
placeholders[PLACEHOLDER_KEY_ADDON] = self.issue.reference
return placeholders or None

View File

@@ -9,7 +9,6 @@ from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
from .coordinator import (
get_addons_list,
get_host_info,
get_info,
get_network_info,
@@ -36,7 +35,6 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
host_info = get_host_info(hass) or {}
supervisor_info = get_supervisor_info(hass)
network_info = get_network_info(hass) or {}
addons_list = get_addons_list(hass) or []
healthy: bool | dict[str, str]
if supervisor_info is not None and supervisor_info.get("healthy"):
@@ -86,8 +84,6 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
os_info = get_os_info(hass) or {}
information["board"] = os_info.get("board")
# Not using aiohasupervisor for ping call below intentionally. Given system health
# context, it seems preferable to do this check with minimal dependencies
information["supervisor_api"] = system_health.async_check_can_reach_url(
hass,
SUPERVISOR_PING.format(ip_address=ip_address),
@@ -99,7 +95,8 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
)
information["installed_addons"] = ", ".join(
f"{addon['name']} ({addon['version']})" for addon in addons_list
f"{addon['name']} ({addon['version']})"
for addon in (supervisor_info or {}).get("addons", [])
)
return information

View File

@@ -39,7 +39,7 @@ from .const import (
WS_TYPE_EVENT,
WS_TYPE_SUBSCRIBE,
)
from .coordinator import get_addons_list
from .coordinator import get_supervisor_info
from .update_helper import update_addon, update_core
SCHEMA_WEBSOCKET_EVENT = vol.Schema(
@@ -168,8 +168,8 @@ async def websocket_update_addon(
"""Websocket handler to update an addon."""
addon_name: str | None = None
addon_version: str | None = None
addons_list: list[dict[str, Any]] = get_addons_list(hass) or []
for addon in addons_list:
addons: list = (get_supervisor_info(hass) or {}).get("addons", [])
for addon in addons:
if addon[ATTR_SLUG] == msg["addon"]:
addon_name = addon[ATTR_NAME]
addon_version = addon[ATTR_VERSION]

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from apyhiveapi import Auth
@@ -27,8 +26,6 @@ from homeassistant.core import callback
from . import HiveConfigEntry
from .const import CONF_CODE, CONF_DEVICE_NAME, CONFIG_ENTRY_VERSION, DOMAIN
_LOGGER = logging.getLogger(__name__)
class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a Hive config flow."""
@@ -39,7 +36,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict[str, Any] = {}
self.tokens: dict[str, Any] = {}
self.tokens: dict[str, str] = {}
self.device_registration: bool = False
self.device_name = "Home Assistant"
@@ -70,22 +67,11 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
except HiveApiError:
errors["base"] = "no_internet_available"
if (
auth_result := self.tokens.get("AuthenticationResult", {})
) and auth_result.get("NewDeviceMetadata"):
_LOGGER.debug("Login successful, New device detected")
self.device_registration = True
return await self.async_step_configuration()
if self.tokens.get("ChallengeName") == "SMS_MFA":
_LOGGER.debug("Login successful, SMS 2FA required")
# Complete SMS 2FA.
return await self.async_step_2fa()
if not errors:
_LOGGER.debug(
"Login successful, no new device detected, no 2FA required"
)
# Complete the entry.
try:
return await self.async_setup_hive_entry()
@@ -117,7 +103,6 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
errors["base"] = "no_internet_available"
if not errors:
_LOGGER.debug("2FA successful")
if self.source == SOURCE_REAUTH:
return await self.async_setup_hive_entry()
self.device_registration = True
@@ -134,11 +119,10 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
if user_input:
if self.device_registration:
_LOGGER.debug("Attempting to register device")
self.device_name = user_input["device_name"]
await self.hive_auth.device_registration(user_input["device_name"])
self.data["device_data"] = await self.hive_auth.get_device_data()
_LOGGER.debug("Device registration successful")
try:
return await self.async_setup_hive_entry()
except UnknownHiveError:
@@ -158,7 +142,6 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
raise UnknownHiveError
# Setup the config entry
_LOGGER.debug("Setting up Hive entry")
self.data["tokens"] = self.tokens
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
@@ -177,7 +160,6 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
CONF_USERNAME: entry_data[CONF_USERNAME],
CONF_PASSWORD: entry_data[CONF_PASSWORD],
}
_LOGGER.debug("Reauthenticating user")
return await self.async_step_user(data)
@staticmethod

View File

@@ -63,7 +63,6 @@ BSH_DOOR_STATE_OPEN = "BSH.Common.EnumType.DoorState.Open"
SERVICE_SET_PROGRAM_AND_OPTIONS = "set_program_and_options"
SERVICE_SETTING = "change_setting"
SERVICE_START_SELECTED_PROGRAM = "start_selected_program"
ATTR_AFFECTS_TO = "affects_to"
ATTR_KEY = "key"

View File

@@ -245,10 +245,25 @@
"change_setting": {
"service": "mdi:cog"
},
"pause_program": {
"service": "mdi:pause"
},
"resume_program": {
"service": "mdi:play-pause"
},
"select_program": {
"service": "mdi:form-select"
},
"set_option_active": {
"service": "mdi:gesture-tap"
},
"set_option_selected": {
"service": "mdi:gesture-tap"
},
"set_program_and_options": {
"service": "mdi:form-select"
},
"start_selected_program": {
"start_program": {
"service": "mdi:play"
}
}

View File

@@ -23,6 +23,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.32.0"],
"requirements": ["aiohomeconnect==0.30.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -13,7 +13,7 @@ from aiohomeconnect.model import (
ProgramKey,
SettingKey,
)
from aiohomeconnect.model.error import HomeConnectError, NoProgramActiveError
from aiohomeconnect.model.error import HomeConnectError
import voluptuous as vol
from homeassistant.const import ATTR_DEVICE_ID
@@ -32,7 +32,6 @@ from .const import (
PROGRAM_ENUM_OPTIONS,
SERVICE_SET_PROGRAM_AND_OPTIONS,
SERVICE_SETTING,
SERVICE_START_SELECTED_PROGRAM,
TRANSLATION_KEYS_PROGRAMS_MAP,
)
from .coordinator import HomeConnectConfigEntry
@@ -125,23 +124,7 @@ SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All(
_require_program_or_at_least_one_option,
)
SERVICE_START_SELECTED_PROGRAM_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): str,
}
).extend(
{
vol.Optional(translation_key): schema
for translation_key, (key, schema) in PROGRAM_OPTIONS.items()
if key
in (
OptionKey.BSH_COMMON_START_IN_RELATIVE,
OptionKey.BSH_COMMON_FINISH_IN_RELATIVE,
)
}
)
)
SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str})
async def _get_client_and_ha_id(
@@ -279,50 +262,6 @@ async def async_service_set_program_and_options(call: ServiceCall) -> None:
) from err
async def async_service_start_selected_program(call: ServiceCall) -> None:
"""Service to start a program that is already selected."""
data = dict(call.data)
client, ha_id = await _get_client_and_ha_id(call.hass, data.pop(ATTR_DEVICE_ID))
try:
try:
program_obj = await client.get_active_program(ha_id)
except NoProgramActiveError:
program_obj = await client.get_selected_program(ha_id)
except HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="fetch_program_error",
translation_placeholders=get_dict_from_home_connect_error(err),
) from err
if not program_obj.key:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_program_to_start",
)
program = program_obj.key
options_dict = {option.key: option for option in program_obj.options or []}
for option, value in data.items():
option_key = PROGRAM_OPTIONS[option][0]
options_dict[option_key] = Option(option_key, value)
try:
await client.start_program(
ha_id,
program_key=program,
options=list(options_dict.values()) if options_dict else None,
)
except HomeConnectError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="start_program",
translation_placeholders={
"program": program,
**get_dict_from_home_connect_error(err),
},
) from err
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register custom actions."""
@@ -336,9 +275,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
async_service_set_program_and_options,
schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_START_SELECTED_PROGRAM,
async_service_start_selected_program,
schema=SERVICE_START_SELECTED_PROGRAM_SCHEMA,
)

View File

@@ -127,7 +127,6 @@ set_program_and_options:
- cooking_oven_program_heating_mode_top_bottom_heating
- cooking_oven_program_heating_mode_top_bottom_heating_eco
- cooking_oven_program_heating_mode_bottom_heating
- cooking_oven_program_heating_mode_bread_baking
- cooking_oven_program_heating_mode_pizza_setting
- cooking_oven_program_heating_mode_slow_cook
- cooking_oven_program_heating_mode_intensive_heat
@@ -136,7 +135,6 @@ set_program_and_options:
- cooking_oven_program_heating_mode_frozen_heatup_special
- cooking_oven_program_heating_mode_desiccation
- cooking_oven_program_heating_mode_defrost
- cooking_oven_program_heating_mode_dough_proving
- cooking_oven_program_heating_mode_proof
- cooking_oven_program_heating_mode_hot_air_30_steam
- cooking_oven_program_heating_mode_hot_air_60_steam
@@ -680,29 +678,3 @@ change_setting:
required: true
selector:
object:
start_selected_program:
fields:
device_id:
required: true
selector:
device:
integration: home_connect
b_s_h_common_option_finish_in_relative:
example: 3600
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: s
b_s_h_common_option_start_in_relative:
example: 3600
required: false
selector:
number:
min: 0
step: 1
mode: box
unit_of_measurement: s

View File

@@ -261,10 +261,8 @@
"cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_delayed_shut_off%]",
"cooking_common_program_hood_venting": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_venting%]",
"cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bottom_heating%]",
"cooking_oven_program_heating_mode_bread_baking": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bread_baking%]",
"cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_defrost%]",
"cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_desiccation%]",
"cooking_oven_program_heating_mode_dough_proving": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_dough_proving%]",
"cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_frozen_heatup_special%]",
"cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air%]",
"cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_100_steam%]",
@@ -617,10 +615,8 @@
"cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_delayed_shut_off%]",
"cooking_common_program_hood_venting": "[%key:component::home_connect::selector::programs::options::cooking_common_program_hood_venting%]",
"cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bottom_heating%]",
"cooking_oven_program_heating_mode_bread_baking": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_bread_baking%]",
"cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_defrost%]",
"cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_desiccation%]",
"cooking_oven_program_heating_mode_dough_proving": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_dough_proving%]",
"cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_frozen_heatup_special%]",
"cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air%]",
"cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::selector::programs::options::cooking_oven_program_heating_mode_hot_air_100_steam%]",
@@ -1344,12 +1340,6 @@
"fetch_api_error": {
"message": "Error obtaining data from the API: {error}"
},
"fetch_program_error": {
"message": "Error obtaining the selected or active program: {error}"
},
"no_program_to_start": {
"message": "No program to start"
},
"oauth2_implementation_unavailable": {
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
@@ -1622,10 +1612,8 @@
"cooking_common_program_hood_delayed_shut_off": "Delayed shut off",
"cooking_common_program_hood_venting": "Venting",
"cooking_oven_program_heating_mode_bottom_heating": "Bottom heating",
"cooking_oven_program_heating_mode_bread_baking": "Bread baking",
"cooking_oven_program_heating_mode_defrost": "Defrost",
"cooking_oven_program_heating_mode_desiccation": "Desiccation",
"cooking_oven_program_heating_mode_dough_proving": "Dough proving",
"cooking_oven_program_heating_mode_frozen_heatup_special": "Special heat-up for frozen products",
"cooking_oven_program_heating_mode_hot_air": "Hot air",
"cooking_oven_program_heating_mode_hot_air_100_steam": "Hot air + 100 RH",
@@ -2084,24 +2072,6 @@
"name": "Washer options"
}
}
},
"start_selected_program": {
"description": "Starts the already selected program. You can update start-only options to start the program with them or modify them on a program that is already active with a delayed start.",
"fields": {
"b_s_h_common_option_finish_in_relative": {
"description": "[%key:component::home_connect::services::set_program_and_options::fields::b_s_h_common_option_finish_in_relative::description%]",
"name": "[%key:component::home_connect::services::set_program_and_options::fields::b_s_h_common_option_finish_in_relative::name%]"
},
"b_s_h_common_option_start_in_relative": {
"description": "[%key:component::home_connect::services::set_program_and_options::fields::b_s_h_common_option_start_in_relative::description%]",
"name": "[%key:component::home_connect::services::set_program_and_options::fields::b_s_h_common_option_start_in_relative::name%]"
},
"device_id": {
"description": "[%key:component::home_connect::services::set_program_and_options::fields::device_id::description%]",
"name": "[%key:component::home_connect::services::set_program_and_options::fields::device_id::name%]"
}
},
"name": "Start selected program"
}
}
}

View File

@@ -965,7 +965,7 @@ class HKDevice:
# visible on the network.
self.async_set_available_state(False)
return
except (AccessoryDisconnectedError, EncryptionError, TimeoutError):
except (AccessoryDisconnectedError, EncryptionError):
# Temporary connection failure. Device may still available but our
# connection was dropped or we are reconnecting
self._poll_failures += 1

View File

@@ -11,14 +11,10 @@ from homematicip.base.enums import (
OpticalSignalBehaviour,
RGBColorState,
)
from homematicip.base.functionalChannels import (
NotificationLightChannel,
NotificationMp3SoundChannel,
)
from homematicip.base.functionalChannels import NotificationLightChannel
from homematicip.device import (
BrandDimmer,
BrandSwitchNotificationLight,
CombinationSignallingDevice,
Device,
Dimmer,
DinRailDimmer3,
@@ -112,8 +108,6 @@ async def async_setup_entry(
entities.append(
HomematicipOpticalSignalLight(hap, device, ch.index, led_number)
)
elif isinstance(device, CombinationSignallingDevice):
entities.append(HomematicipCombinationSignallingLight(hap, device))
async_add_entities(entities)
@@ -592,70 +586,3 @@ class HomematicipOpticalSignalLight(HomematicipGenericEntity, LightEntity):
rgb=simple_rgb_color,
dimLevel=0.0,
)
class HomematicipCombinationSignallingLight(HomematicipGenericEntity, LightEntity):
    """Representation of the HomematicIP combination signalling device light (HmIP-MP3P)."""

    _attr_color_mode = ColorMode.HS
    _attr_supported_color_modes = {ColorMode.HS}

    # Map the device's simple RGB color states to (hue, saturation) pairs.
    _color_switcher: dict[str, tuple[float, float]] = {
        RGBColorState.WHITE: (0.0, 0.0),
        RGBColorState.RED: (0.0, 100.0),
        RGBColorState.YELLOW: (60.0, 100.0),
        RGBColorState.GREEN: (120.0, 100.0),
        RGBColorState.TURQUOISE: (180.0, 100.0),
        RGBColorState.BLUE: (240.0, 100.0),
        RGBColorState.PURPLE: (300.0, 100.0),
    }

    def __init__(
        self, hap: HomematicipHAP, device: CombinationSignallingDevice
    ) -> None:
        """Initialize the combination signalling light entity."""
        super().__init__(hap, device, channel=1, is_multi_channel=False)

    @property
    def _func_channel(self) -> NotificationMp3SoundChannel:
        """Return the functional channel backing this entity."""
        return self._device.functionalChannels[self._channel]

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        return self._func_channel.on

    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 0..255."""
        return int((self._func_channel.dimLevel or 0.0) * 255)

    @property
    def hs_color(self) -> tuple[float, float]:
        """Return the hue and saturation color value [float, float]."""
        return self._color_switcher.get(
            self._func_channel.simpleRGBColorState, (0.0, 0.0)
        )

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the light on."""
        simple_rgb_color = _convert_color(kwargs.get(ATTR_HS_COLOR, self.hs_color))
        if kwargs:
            brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness)
        else:
            # Default to full brightness when no kwargs given
            brightness = 255
        # Minimum brightness is 10, otherwise the LED is disabled
        brightness = max(10, brightness)
        await self._func_channel.set_rgb_dim_level_async(
            rgb_color_state=simple_rgb_color.name,
            dim_level=brightness / 255.0,
        )

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the light off."""
        await self._func_channel.turn_off_async()

View File

@@ -12,8 +12,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import HuumConfigEntry, HuumDataUpdateCoordinator
from .entity import HuumBaseEntity
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -24,8 +24,6 @@ from .entity import HuumBaseEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -36,7 +36,6 @@ class HuumConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the initial step."""
errors = {}
if user_input is not None:
self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]})
try:
huum = Huum(
user_input[CONF_USERNAME],
@@ -52,6 +51,9 @@ class HuumConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unknown error")
errors["base"] = "unknown"
else:
self._async_abort_entries_match(
{CONF_USERNAME: user_input[CONF_USERNAME]}
)
return self.async_create_entry(
title=user_input[CONF_USERNAME], data=user_input
)

View File

@@ -15,8 +15,6 @@ from .entity import HuumBaseEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/huum",
"integration_type": "device",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["huum==0.8.1"]
}

View File

@@ -16,8 +16,6 @@ from .entity import HuumBaseEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -1,93 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow:
status: done
comment: |
PLANNED: Remove _LOGGER.error call from config_flow.py — the error
message is redundant with the errors dict entry.
dependency-transparency: done
docs-actions:
status: exempt
comment: Integration does not register custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Integration does not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: Integration has no options flow.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable:
status: done
comment: |
PLANNED: Remove _LOGGER.error from coordinator.py — the message is already
passed to UpdateFailed, so logging it separately is redundant.
parallel-updates: done
reauthentication-flow: todo
test-coverage:
status: todo
comment: |
PLANNED: Use freezer-based time advancement instead of directly calling async_refresh().
# Gold
devices: done
diagnostics: todo
discovery: todo
discovery-update-info: todo
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Single device per account, no dynamic devices.
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: All entities are core functionality.
entity-translations: done
exception-translations: todo
icon-translations:
status: done
comment: |
PLANNED: Remove the icon property from climate.py — entities should not set
custom icons. Use HA defaults or icon translations instead.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: Integration has no repair scenarios.
stale-devices:
status: exempt
comment: Single device per config entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -14,8 +14,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import HuumConfigEntry, HuumDataUpdateCoordinator
from .entity import HuumBaseEntity
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -8,7 +8,6 @@ from homeassistant.core import HomeAssistant
from .coordinator import IndevoltConfigEntry, IndevoltCoordinator
PLATFORMS: list[Platform] = [
Platform.BUTTON,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,

View File

@@ -1,70 +0,0 @@
"""Button platform for Indevolt integration."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Final
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import IndevoltConfigEntry
from .coordinator import IndevoltCoordinator
from .entity import IndevoltEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IndevoltButtonEntityDescription(ButtonEntityDescription):
    """Custom entity description class for Indevolt button entities."""

    # Device generations this button is available for (defaults to all known).
    generation: list[int] = field(default_factory=lambda: [1, 2])


# All button descriptions for the platform; filtered by device generation
# in async_setup_entry.
BUTTONS: Final = (
    IndevoltButtonEntityDescription(
        key="stop",
        translation_key="stop",
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: IndevoltConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the button platform for Indevolt."""
    coordinator = entry.runtime_data
    # Only create buttons that apply to this device generation.
    async_add_entities(
        IndevoltButtonEntity(coordinator=coordinator, description=button_description)
        for button_description in BUTTONS
        if coordinator.generation in button_description.generation
    )
class IndevoltButtonEntity(IndevoltEntity, ButtonEntity):
    """Represents a button entity for Indevolt devices."""

    entity_description: IndevoltButtonEntityDescription

    def __init__(
        self,
        coordinator: IndevoltCoordinator,
        description: IndevoltButtonEntityDescription,
    ) -> None:
        """Initialize the Indevolt button entity."""
        super().__init__(coordinator)
        self.entity_description = description
        # Unique per physical device and button kind.
        self._attr_unique_id = f"{self.serial_number}_{description.key}"

    async def async_press(self) -> None:
        """Handle the button press."""
        # [0, 0, 0] is the "stop" real-time action (shown to users as
        # "Enable standby mode" in the translations).
        await self.coordinator.async_execute_realtime_action([0, 0, 0])

View File

@@ -1,27 +1,16 @@
"""Constants for the Indevolt integration."""
from typing import Final
DOMAIN: Final = "indevolt"
# Default configurations
DEFAULT_PORT: Final = 8080
DOMAIN = "indevolt"
# Config entry fields
CONF_SERIAL_NUMBER: Final = "serial_number"
CONF_GENERATION: Final = "generation"
CONF_SERIAL_NUMBER = "serial_number"
CONF_GENERATION = "generation"
# API write/read keys for energy and value for outdoor/portable mode
ENERGY_MODE_READ_KEY: Final = "7101"
ENERGY_MODE_WRITE_KEY: Final = "47005"
PORTABLE_MODE: Final = 0
# API write key and value for real-time control mode
REALTIME_ACTION_KEY: Final = "47015"
REALTIME_ACTION_MODE: Final = 4
# Default values
DEFAULT_PORT = 8080
# API key fields
SENSOR_KEYS: Final[dict[int, list[str]]] = {
SENSOR_KEYS = {
1: [
"606",
"7101",

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any, Final
from typing import Any
from aiohttp import ClientError
from indevolt_api import IndevoltAPI, TimeOutException
@@ -21,37 +21,20 @@ from .const import (
CONF_SERIAL_NUMBER,
DEFAULT_PORT,
DOMAIN,
ENERGY_MODE_READ_KEY,
ENERGY_MODE_WRITE_KEY,
PORTABLE_MODE,
REALTIME_ACTION_KEY,
REALTIME_ACTION_MODE,
SENSOR_KEYS,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL: Final = 30
SCAN_INTERVAL = 30
type IndevoltConfigEntry = ConfigEntry[IndevoltCoordinator]
class DeviceTimeoutError(HomeAssistantError):
"""Raised when device push times out."""
class DeviceConnectionError(HomeAssistantError):
"""Raised when device push fails due to connection issues."""
class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Coordinator for fetching and pushing data to indevolt devices."""
friendly_name: str
config_entry: IndevoltConfigEntry
firmware_version: str | None
serial_number: str
device_model: str
generation: int
def __init__(self, hass: HomeAssistant, entry: IndevoltConfigEntry) -> None:
"""Initialize the indevolt coordinator."""
@@ -70,7 +53,6 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
session=async_get_clientsession(hass),
)
self.friendly_name = entry.title
self.serial_number = entry.data[CONF_SERIAL_NUMBER]
self.device_model = entry.data[CONF_MODEL]
self.generation = entry.data[CONF_GENERATION]
@@ -103,67 +85,6 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
try:
return await self.api.set_data(sensor_key, value)
except TimeOutException as err:
raise DeviceTimeoutError(f"Device push timed out: {err}") from err
raise HomeAssistantError(f"Device push timed out: {err}") from err
except (ClientError, ConnectionError, OSError) as err:
raise DeviceConnectionError(f"Device push failed: {err}") from err
async def async_switch_energy_mode(
self, target_mode: int, refresh: bool = True
) -> None:
"""Attempt to switch device to given energy mode."""
current_mode = self.data.get(ENERGY_MODE_READ_KEY)
# Ensure current energy mode is known
if current_mode is None:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_retrieve_current_energy_mode",
)
# Ensure device is not in "Outdoor/Portable mode"
if current_mode == PORTABLE_MODE:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="energy_mode_change_unavailable_outdoor_portable",
)
# Switch energy mode if required
if current_mode != target_mode:
try:
success = await self.async_push_data(ENERGY_MODE_WRITE_KEY, target_mode)
except (DeviceTimeoutError, DeviceConnectionError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_switch_energy_mode",
) from err
if not success:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_switch_energy_mode",
)
if refresh:
await self.async_request_refresh()
async def async_execute_realtime_action(self, action: list[int]) -> None:
"""Switch mode, execute action, and refresh for real-time control."""
await self.async_switch_energy_mode(REALTIME_ACTION_MODE, refresh=False)
try:
success = await self.async_push_data(REALTIME_ACTION_KEY, action)
except (DeviceTimeoutError, DeviceConnectionError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_execute_realtime_action",
) from err
if not success:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_execute_realtime_action",
)
await self.async_request_refresh()
raise HomeAssistantError(f"Device push failed: {err}") from err

View File

@@ -35,11 +35,6 @@
}
},
"entity": {
"button": {
"stop": {
"name": "Enable standby mode"
}
},
"number": {
"discharge_limit": {
"name": "Discharge limit"
@@ -294,19 +289,5 @@
"name": "LED indicator"
}
}
},
"exceptions": {
"energy_mode_change_unavailable_outdoor_portable": {
"message": "Energy mode cannot be changed when the device is in outdoor/portable mode"
},
"failed_to_execute_realtime_action": {
"message": "Failed to execute real-time action"
},
"failed_to_retrieve_current_energy_mode": {
"message": "Failed to retrieve current energy mode"
},
"failed_to_switch_energy_mode": {
"message": "Failed to switch to requested energy mode"
}
}
}

View File

@@ -4,21 +4,11 @@ from typing import Any
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers import device_registry as dr
from .client_wrapper import CannotConnect, InvalidAuth, create_client, validate_input
from .const import CONF_CLIENT_DEVICE_ID, DEFAULT_NAME, DOMAIN, PLATFORMS
from .coordinator import JellyfinConfigEntry, JellyfinDataUpdateCoordinator
from .services import async_setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Jellyfin component."""
await async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> bool:

View File

@@ -38,8 +38,6 @@ PLAYABLE_MEDIA_TYPES = [
MediaType.EPISODE,
MediaType.MOVIE,
MediaType.MUSIC,
MediaType.SEASON,
MediaType.TVSHOW,
]
@@ -100,8 +98,8 @@ async def build_item_response(
media_content_id: str,
) -> BrowseMedia:
"""Create response payload for the provided media query."""
title, media, thumbnail, media_type = await get_media_info(
hass, client, user_id, media_content_id
title, media, thumbnail = await get_media_info(
hass, client, user_id, media_content_type, media_content_id
)
if title is None or media is None:
@@ -113,12 +111,12 @@ async def build_item_response(
response = BrowseMedia(
media_class=CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS.get(
str(media_type), MediaClass.DIRECTORY
str(media_content_type), MediaClass.DIRECTORY
),
media_content_id=media_content_id,
media_content_type=str(media_type),
media_content_type=str(media_content_type),
title=title,
can_play=bool(media_type in PLAYABLE_MEDIA_TYPES and media_content_id),
can_play=bool(media_content_type in PLAYABLE_MEDIA_TYPES and media_content_id),
can_expand=True,
children=children,
thumbnail=thumbnail,
@@ -209,18 +207,18 @@ async def get_media_info(
hass: HomeAssistant,
client: JellyfinClient,
user_id: str,
media_content_type: str | None,
media_content_id: str,
) -> tuple[str | None, list[dict[str, Any]] | None, str | None, str | None]:
) -> tuple[str | None, list[dict[str, Any]] | None, str | None]:
"""Fetch media info."""
thumbnail: str | None = None
title: str | None = None
media: list[dict[str, Any]] | None = None
media_type: str | None = None
item = await hass.async_add_executor_job(fetch_item, client, media_content_id)
if item is None:
return None, None, None, None
return None, None, None
title = item["Name"]
thumbnail = get_artwork_url(client, item)
@@ -233,6 +231,4 @@ async def get_media_info(
if not media or len(media) == 0:
media = None
media_type = CONTENT_TYPE_MAP.get(item["Type"], MEDIA_TYPE_NONE)
return title, media, thumbnail, media_type
return title, media, thumbnail

View File

@@ -74,10 +74,9 @@ MEDIA_CLASS_MAP = {
"MusicAlbum": MediaClass.ALBUM,
"MusicArtist": MediaClass.ARTIST,
"Audio": MediaClass.MUSIC,
"Series": MediaClass.TV_SHOW,
"Series": MediaClass.DIRECTORY,
"Movie": MediaClass.MOVIE,
"CollectionFolder": MediaClass.DIRECTORY,
"AggregateFolder": MediaClass.DIRECTORY,
"Folder": MediaClass.DIRECTORY,
"BoxSet": MediaClass.DIRECTORY,
"Episode": MediaClass.EPISODE,

View File

@@ -5,10 +5,5 @@
"default": "mdi:television-play"
}
}
},
"services": {
"play_media_shuffle": {
"service": "mdi:shuffle-variant"
}
}
}

View File

@@ -6,9 +6,7 @@ import logging
from typing import Any
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
BrowseMedia,
MediaPlayerEnqueue,
MediaPlayerEntity,
MediaPlayerEntityFeature,
MediaPlayerState,
@@ -205,7 +203,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
| MediaPlayerEntityFeature.STOP
| MediaPlayerEntityFeature.SEEK
| MediaPlayerEntityFeature.SEARCH_MEDIA
| MediaPlayerEntityFeature.MEDIA_ENQUEUE
)
if "Mute" in commands and "Unmute" in commands:
@@ -248,20 +245,8 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None:
"""Play a piece of media."""
command = "PlayNow"
enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE)
if enqueue == MediaPlayerEnqueue.NEXT:
command = "PlayNext"
elif enqueue == MediaPlayerEnqueue.ADD:
command = "PlayLast"
self.coordinator.api_client.jellyfin.remote_play_media(
self.session_id, [media_id], command
)
def play_media_shuffle(self, media_content_id: str) -> None:
"""Play a piece of media on shuffle."""
self.coordinator.api_client.jellyfin.remote_play_media(
self.session_id, [media_content_id], "PlayShuffle"
self.session_id, [media_id]
)
def set_volume_level(self, volume: float) -> None:

View File

@@ -1,55 +0,0 @@
"""Services for the Jellyfin integration."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA,
ATTR_MEDIA_CONTENT_ID,
DOMAIN as MP_DOMAIN,
MediaPlayerEntityFeature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, service
from .const import DOMAIN
# Service-specific fields for play_media_shuffle; the nested "media" object
# is flattened into media_content_id by _promote_media_fields before this
# schema is applied.
JELLYFIN_PLAY_MEDIA_SHUFFLE_SCHEMA = {
    vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string,
}
def _promote_media_fields(data: dict[str, Any]) -> dict[str, Any]:
    """If 'media' key exists, promote its fields to the top level."""
    media_payload = data.get(ATTR_MEDIA)
    if not isinstance(media_payload, dict):
        # Nothing to promote; leave the payload untouched.
        return data
    if ATTR_MEDIA_CONTENT_ID in data:
        raise vol.Invalid(
            f"Play media cannot contain both '{ATTR_MEDIA}' and '{ATTR_MEDIA_CONTENT_ID}'"
        )
    if ATTR_MEDIA_CONTENT_ID in media_payload:
        data[ATTR_MEDIA_CONTENT_ID] = media_payload[ATTR_MEDIA_CONTENT_ID]
    del data[ATTR_MEDIA]
    return data
async def async_setup_services(hass: HomeAssistant) -> None:
    """Set up services for the Jellyfin component."""
    # Flatten the "media" selector payload before validating the fields.
    play_media_shuffle_schema = vol.All(
        _promote_media_fields,
        cv.make_entity_service_schema(JELLYFIN_PLAY_MEDIA_SHUFFLE_SCHEMA),
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "play_media_shuffle",
        entity_domain=MP_DOMAIN,
        schema=play_media_shuffle_schema,
        func="play_media_shuffle",
        required_features=MediaPlayerEntityFeature.PLAY_MEDIA,
    )

View File

@@ -1,11 +0,0 @@
play_media_shuffle:
target:
entity:
integration: jellyfin
domain: media_player
fields:
media:
required: true
selector:
media:
example: '{"media_content_id": "a656b907eb3a73532e40e44b968d0225"}'

View File

@@ -42,17 +42,5 @@
}
}
}
},
"services": {
"play_media_shuffle": {
"description": "Starts playing specified media shuffled. Overwrites current play queue.",
"fields": {
"media": {
"description": "The media selected to play.",
"name": "Media"
}
},
"name": "Play media shuffled"
}
}
}

View File

@@ -46,16 +46,6 @@ class LitterRobotDataUpdateCoordinator(DataUpdateCoordinator[None]):
self.account = Account(websession=async_get_clientsession(hass))
self.previous_members: set[str] = set()
# Initialize previous_members from the device registry so that
# stale devices can be detected on the first update after restart.
device_registry = dr.async_get(hass)
for device in dr.async_entries_for_config_entry(
device_registry, config_entry.entry_id
):
for domain, identifier in device.identifiers:
if domain == DOMAIN:
self.previous_members.add(identifier)
async def _async_update_data(self) -> None:
"""Update all device states from the Litter-Robot API."""
try:

View File

@@ -1,78 +0,0 @@
"""The LoJack integration for Home Assistant."""
from __future__ import annotations
from dataclasses import dataclass, field
from lojack_api import ApiError, AuthenticationError, LoJackClient, Vehicle
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import LoJackCoordinator
PLATFORMS: list[Platform] = [Platform.DEVICE_TRACKER]
@dataclass
class LoJackData:
    """Runtime data for a LoJack config entry."""

    # Shared API client; closed again in async_unload_entry.
    client: LoJackClient
    # One coordinator per tracked vehicle, filled during setup.
    coordinators: list[LoJackCoordinator] = field(default_factory=list)


# Config entry type whose runtime_data carries LoJackData.
type LoJackConfigEntry = ConfigEntry[LoJackData]
async def async_setup_entry(hass: HomeAssistant, entry: LoJackConfigEntry) -> bool:
    """Set up LoJack from a config entry.

    Creates the API client, builds one coordinator per vehicle, and forwards
    the entry to the platforms. The client is closed on any failure after it
    was created, so a retried setup never leaks a session.

    Raises:
        ConfigEntryAuthFailed: credentials were rejected by the API.
        ConfigEntryNotReady: the API is unreachable or returned an error.
    """
    session = async_get_clientsession(hass)

    try:
        client = await LoJackClient.create(
            entry.data[CONF_USERNAME],
            entry.data[CONF_PASSWORD],
            session=session,
        )
    except AuthenticationError as err:
        raise ConfigEntryAuthFailed(f"Authentication failed: {err}") from err
    except ApiError as err:
        raise ConfigEntryNotReady(f"API error during setup: {err}") from err

    data = LoJackData(client=client)
    entry.runtime_data = data

    # From here on the client must be closed on every failure path —
    # including platform forwarding, which the original code left uncovered.
    try:
        vehicles = await client.list_devices()
        for vehicle in vehicles or []:
            # The API may return non-vehicle devices; only track vehicles.
            if isinstance(vehicle, Vehicle):
                coordinator = LoJackCoordinator(hass, client, entry, vehicle)
                await coordinator.async_config_entry_first_refresh()
                data.coordinators.append(coordinator)
        await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    except AuthenticationError as err:
        await client.close()
        raise ConfigEntryAuthFailed(f"Authentication failed: {err}") from err
    except ApiError as err:
        await client.close()
        raise ConfigEntryNotReady(f"API error during setup: {err}") from err
    except Exception:
        await client.close()
        raise

    return True
async def async_unload_entry(hass: HomeAssistant, entry: LoJackConfigEntry) -> bool:
    """Unload a config entry."""
    if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        return False
    # Platforms are gone; release the API session.
    await entry.runtime_data.client.close()
    return True

View File

@@ -1,111 +0,0 @@
"""Config flow for LoJack integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from lojack_api import ApiError, AuthenticationError, LoJackClient
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)
class LoJackConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for LoJack."""

    VERSION = 1
    MINOR_VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step.

        Validates the credentials by creating a short-lived client; the
        context manager closes it again after reading the user id.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            try:
                async with await LoJackClient.create(
                    user_input[CONF_USERNAME],
                    user_input[CONF_PASSWORD],
                    session=async_get_clientsession(self.hass),
                ) as client:
                    user_id = client.user_id
            except AuthenticationError:
                errors["base"] = "invalid_auth"
            except ApiError:
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if not user_id:
                    # Login worked but no user id came back; without a stable
                    # unique id we cannot create an entry, so surface an error.
                    errors["base"] = "unknown"
                else:
                    # The account's user id is the unique id, preventing the
                    # same account from being configured twice.
                    await self.async_set_unique_id(user_id)
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=f"LoJack ({user_input[CONF_USERNAME]})",
                        data=user_input,
                    )
        return self.async_show_form(
            step_id="user",
            data_schema=STEP_USER_DATA_SCHEMA,
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauthentication."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reauthentication confirmation.

        Only asks for a new password; the username is taken from the
        existing entry and is not changeable during reauth.
        """
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()
        if user_input is not None:
            try:
                # Probe the credentials; the client is discarded immediately.
                async with await LoJackClient.create(
                    reauth_entry.data[CONF_USERNAME],
                    user_input[CONF_PASSWORD],
                    session=async_get_clientsession(self.hass),
                ):
                    pass
            except AuthenticationError:
                errors["base"] = "invalid_auth"
            except ApiError:
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                # Persist only the new password and reload the entry.
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
            description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]},
            errors=errors,
        )

View File

@@ -1,13 +0,0 @@
"""Constants for the LoJack integration."""
from __future__ import annotations
import logging
from typing import Final
DOMAIN: Final = "lojack"
LOGGER = logging.getLogger(__package__)
# Default polling interval (in minutes)
DEFAULT_UPDATE_INTERVAL: Final = 5

View File

@@ -1,68 +0,0 @@
"""Data update coordinator for the LoJack integration."""
from __future__ import annotations
from datetime import timedelta
from typing import TYPE_CHECKING
from lojack_api import ApiError, AuthenticationError, LoJackClient
from lojack_api.device import Vehicle
from lojack_api.models import Location
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_UPDATE_INTERVAL, DOMAIN, LOGGER
if TYPE_CHECKING:
from . import LoJackConfigEntry
def get_device_name(vehicle: Vehicle) -> str:
    """Get a human-readable name for a vehicle.

    Prefers "<year> <make> <model>" (skipping missing pieces), then the
    vehicle's own name, and finally the literal fallback "Vehicle".
    """
    components: list[str] = []
    if vehicle.year:
        components.append(str(vehicle.year))
    if vehicle.make:
        components.append(vehicle.make)
    if vehicle.model:
        components.append(vehicle.model)
    if components:
        return " ".join(components)
    return vehicle.name or "Vehicle"
class LoJackCoordinator(DataUpdateCoordinator[Location]):
    """Class to manage fetching LoJack data for a single vehicle."""

    config_entry: LoJackConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        client: LoJackClient,
        entry: ConfigEntry,
        vehicle: Vehicle,
    ) -> None:
        """Initialize the coordinator.

        Each coordinator tracks exactly one vehicle; the shared client is
        owned by the config entry, not by this coordinator.
        """
        self.client = client
        self.vehicle = vehicle
        super().__init__(
            hass,
            LOGGER,
            # Unique coordinator name per vehicle for log readability.
            name=f"{DOMAIN}_{vehicle.id}",
            update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
            config_entry=entry,
        )

    async def _async_update_data(self) -> Location:
        """Fetch location data for this vehicle.

        Raises:
            ConfigEntryAuthFailed: if credentials are rejected (starts reauth).
            UpdateFailed: on API errors or when no location is returned,
                marking dependent entities unavailable until the next refresh.
        """
        try:
            # force=True presumably bypasses any server-side cache for a fresh
            # fix — TODO confirm against lojack_api docs.
            location = await self.vehicle.get_location(force=True)
        except AuthenticationError as err:
            raise ConfigEntryAuthFailed(f"Authentication failed: {err}") from err
        except ApiError as err:
            raise UpdateFailed(f"Error fetching data: {err}") from err
        if location is None:
            raise UpdateFailed("No location data available")
        return location

View File

@@ -1,78 +0,0 @@
"""Device tracker platform for LoJack integration."""
from __future__ import annotations
from homeassistant.components.device_tracker import SourceType, TrackerEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import LoJackConfigEntry
from .const import DOMAIN
from .coordinator import LoJackCoordinator, get_device_name
PARALLEL_UPDATES = 0
async def async_setup_entry(
    hass: HomeAssistant,
    entry: LoJackConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up LoJack device tracker from a config entry."""
    # One tracker entity per vehicle coordinator created during entry setup.
    trackers = [
        LoJackDeviceTracker(coordinator)
        for coordinator in entry.runtime_data.coordinators
    ]
    async_add_entities(trackers)
class LoJackDeviceTracker(CoordinatorEntity[LoJackCoordinator], TrackerEntity):
    """Representation of a LoJack device tracker."""

    _attr_has_entity_name = True
    # Main entity of the device: no entity name, the device name is shown.
    _attr_name = None

    def __init__(self, coordinator: LoJackCoordinator) -> None:
        """Initialize the device tracker."""
        super().__init__(coordinator)
        self._attr_unique_id = coordinator.vehicle.id

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device info."""
        vehicle = self.coordinator.vehicle
        return DeviceInfo(
            identifiers={(DOMAIN, vehicle.id)},
            name=get_device_name(vehicle),
            manufacturer="Spireon LoJack",
            model=vehicle.model,
            serial_number=vehicle.vin,
        )

    @property
    def source_type(self) -> SourceType:
        """Return the source type of the device."""
        return SourceType.GPS

    @property
    def latitude(self) -> float | None:
        """Return the latitude of the device."""
        return self.coordinator.data.latitude

    @property
    def longitude(self) -> float | None:
        """Return the longitude of the device."""
        return self.coordinator.data.longitude

    @property
    def location_accuracy(self) -> int:
        """Return the location accuracy of the device."""
        accuracy = self.coordinator.data.accuracy
        return 0 if accuracy is None else int(accuracy)

    @property
    def battery_level(self) -> int | None:
        """Return the battery level of the device (if applicable)."""
        # LoJack devices report vehicle battery voltage, not percentage
        return None

View File

@@ -1,12 +0,0 @@
{
"domain": "lojack",
"name": "LoJack",
"codeowners": ["@devinslick"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/lojack",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["lojack_api"],
"quality_scale": "silver",
"requirements": ["lojack-api==0.7.1"]
}

View File

@@ -1,81 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: This integration does not provide actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow: done
config-flow-test-coverage: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not provide actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: This integration does not provide actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: This integration does not provide an options flow.
docs-installation-parameters:
status: done
comment: Documented in https://github.com/home-assistant/home-assistant.io/pull/43463
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery:
status: exempt
comment: This is a cloud polling integration with no local discovery mechanism since the devices are not on a local network.
discovery-update-info:
status: exempt
comment: This is a cloud polling integration with no local discovery mechanism.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: Vehicles are tied to the user account. Changes require integration reload.
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: The device tracker entity is the primary entity and should be enabled by default.
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: No user-actionable repair scenarios identified for this integration.
stale-devices:
status: exempt
comment: Vehicles removed from the LoJack account stop appearing in API responses and become unavailable.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,38 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {
"user": "[%key:common::config_flow::initiate_flow::account%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::lojack::config::step::user::data_description::password%]"
},
"description": "Re-enter the password for {username}."
},
"user": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"password": "Your LoJack/Spireon account password",
"username": "Your LoJack/Spireon account email address"
},
"description": "Enter your LoJack/Spireon account credentials."
}
}
}
}

View File

@@ -47,14 +47,6 @@ COLOR_MODE_MAP = {
clusters.ColorControl.Enums.ColorModeEnum.kColorTemperatureMireds: ColorMode.COLOR_TEMP,
}
# Maximum Mireds value per the Matter spec is 65279
# Conversion between Kelvin and Mireds is 1,000,000 / Kelvin, so this corresponds to a minimum color temperature of ~15.3K
# Which is shown in UI as 15 Kelvin due to rounding.
# But converting 15 Kelvin back to Mireds gives 66666 which is above the maximum,
# and causes Invoke error, so cap values over maximum when sending
MATTER_MAX_MIREDS = 65279
# there's a bug in (at least) Espressif's implementation of light transitions
# on devices based on Matter 1.0. Mark potential devices with this issue.
# https://github.com/home-assistant/core/issues/113775
@@ -160,7 +152,7 @@ class MatterLight(MatterEntity, LightEntity):
)
await self.send_device_command(
clusters.ColorControl.Commands.MoveToColorTemperature(
colorTemperatureMireds=min(color_temp_mired, MATTER_MAX_MIREDS),
colorTemperatureMireds=color_temp_mired,
# transition in matter is measured in tenths of a second
transitionTime=int(transition * 10),
# allow setting the color while the light is off,

View File

@@ -80,7 +80,6 @@ class MatterUpdate(MatterEntity, UpdateEntity):
# Matter server.
_attr_should_poll = True
_software_update: MatterSoftwareVersion | None = None
_installed_software_version: int | None = None
_cancel_update: CALLBACK_TYPE | None = None
_attr_supported_features = (
UpdateEntityFeature.INSTALL
@@ -93,9 +92,6 @@ class MatterUpdate(MatterEntity, UpdateEntity):
def _update_from_device(self) -> None:
"""Update from device."""
self._installed_software_version = self.get_matter_attribute_value(
clusters.BasicInformation.Attributes.SoftwareVersion
)
self._attr_installed_version = self.get_matter_attribute_value(
clusters.BasicInformation.Attributes.SoftwareVersionString
)
@@ -127,22 +123,6 @@ class MatterUpdate(MatterEntity, UpdateEntity):
else:
self._attr_update_percentage = None
def _format_latest_version(
self, update_information: MatterSoftwareVersion
) -> str | None:
"""Return the version string to expose in Home Assistant."""
latest_version = update_information.software_version_string
if self._installed_software_version is None:
return latest_version
if update_information.software_version == self._installed_software_version:
return self._attr_installed_version or latest_version
if latest_version == self._attr_installed_version:
return f"{latest_version} ({update_information.software_version})"
return latest_version
async def async_update(self) -> None:
"""Call when the entity needs to be updated."""
try:
@@ -150,13 +130,11 @@ class MatterUpdate(MatterEntity, UpdateEntity):
node_id=self._endpoint.node.node_id
)
if not update_information:
self._software_update = None
self._attr_latest_version = self._attr_installed_version
self._attr_release_url = None
return
self._software_update = update_information
self._attr_latest_version = self._format_latest_version(update_information)
self._attr_latest_version = update_information.software_version_string
self._attr_release_url = update_information.release_notes_url
except UpdateCheckError as err:
@@ -234,12 +212,7 @@ class MatterUpdate(MatterEntity, UpdateEntity):
software_version: str | int | None = version
if self._software_update is not None and (
version is None
or version
in {
self._software_update.software_version_string,
self._attr_latest_version,
}
version is None or version == self._software_update.software_version_string
):
# Update to the version previously fetched and shown.
# We can pass the integer version directly to speedup download.

View File

@@ -203,80 +203,105 @@ class MoldIndicator(SensorEntity):
def _async_setup_sensor(self) -> None:
"""Set up the sensor and start tracking state changes."""
self.async_on_remove(
@callback
def mold_indicator_sensors_state_listener(
event: Event[EventStateChangedData],
) -> None:
"""Handle for state changes for dependent sensors."""
new_state = event.data["new_state"]
old_state = event.data["old_state"]
entity = event.data["entity_id"]
_LOGGER.debug(
"Sensor state change for %s that had old state %s and new state %s",
entity,
old_state,
new_state,
)
if self._update_sensor(entity, old_state, new_state):
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(
calculated_state.state, calculated_state.attributes
)
# only write state to the state machine if we are not in preview mode
else:
self.async_schedule_update_ha_state(True)
@callback
def mold_indicator_startup() -> None:
"""Add listeners and get 1st state."""
_LOGGER.debug("Startup for %s", self.entity_id)
async_track_state_change_event(
self.hass,
self._entities.values(),
self._async_mold_indicator_sensor_state_listener,
)
)
# Replay current state of source entities
for entity_id in self._entities.values():
state = self.hass.states.get(entity_id)
state_event: Event[EventStateChangedData] = Event(
"", {"entity_id": entity_id, "new_state": state, "old_state": None}
)
self._async_mold_indicator_sensor_state_listener(
state_event, update_state=False
list(self._entities.values()),
mold_indicator_sensors_state_listener,
)
self._recalculate()
# Read initial state
indoor_temp = self.hass.states.get(self._entities[CONF_INDOOR_TEMP])
outdoor_temp = self.hass.states.get(self._entities[CONF_OUTDOOR_TEMP])
indoor_hum = self.hass.states.get(self._entities[CONF_INDOOR_HUMIDITY])
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(calculated_state.state, calculated_state.attributes)
schedule_update = self._update_sensor(
self._entities[CONF_INDOOR_TEMP], None, indoor_temp
)
@callback
def _async_mold_indicator_sensor_state_listener(
self, event: Event[EventStateChangedData], update_state: bool = True
) -> None:
"""Handle state changes for dependent sensors."""
entity_id = event.data["entity_id"]
new_state = event.data["new_state"]
schedule_update = (
False
if not self._update_sensor(
self._entities[CONF_OUTDOOR_TEMP], None, outdoor_temp
)
else schedule_update
)
_LOGGER.debug(
"Sensor state change for %s that had old state %s and new state %s",
entity_id,
event.data["old_state"],
new_state,
)
schedule_update = (
False
if not self._update_sensor(
self._entities[CONF_INDOOR_HUMIDITY], None, indoor_hum
)
else schedule_update
)
# update state depending on which sensor changed
if entity_id == self._entities[CONF_INDOOR_TEMP]:
if schedule_update and not self._preview_callback:
self.async_schedule_update_ha_state(True)
if self._preview_callback:
# re-calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
if self._attr_native_value is None:
self._attr_available = False
else:
self._attr_available = True
calculated_state = self._async_calculate_state()
self._preview_callback(
calculated_state.state, calculated_state.attributes
)
mold_indicator_startup()
def _update_sensor(
self, entity: str, old_state: State | None, new_state: State | None
) -> bool:
"""Update information based on new sensor states."""
_LOGGER.debug("Sensor update for %s", entity)
if new_state is None:
return False
# If old_state is not set and new state is unknown then it means
# that the sensor just started up
if old_state is None and new_state.state == STATE_UNKNOWN:
return False
if entity == self._entities[CONF_INDOOR_TEMP]:
self._indoor_temp = self._get_temperature_from_state(new_state)
elif entity_id == self._entities[CONF_OUTDOOR_TEMP]:
elif entity == self._entities[CONF_OUTDOOR_TEMP]:
self._outdoor_temp = self._get_temperature_from_state(new_state)
elif entity_id == self._entities[CONF_INDOOR_HUMIDITY]:
elif entity == self._entities[CONF_INDOOR_HUMIDITY]:
self._indoor_hum = self._get_humidity_from_state(new_state)
if not update_state:
return
self._recalculate()
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(calculated_state.state, calculated_state.attributes)
# only write state to the state machine if we are not in preview mode
else:
self.async_write_ha_state()
@callback
def _recalculate(self) -> None:
"""Recalculate mold indicator from cached sensor values."""
# Check if all sensors are available
if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
self._attr_available = False
self._attr_native_value = None
self._dewpoint = None
self._crit_temp = None
return
# Calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
self._attr_available = self._attr_native_value is not None
return True
def _get_value_from_state(
self,
@@ -351,6 +376,26 @@ class MoldIndicator(SensorEntity):
return self._get_value_from_state(state, validate_humidity)
async def async_update(self) -> None:
"""Calculate latest state."""
_LOGGER.debug("Update state for %s", self.entity_id)
# check all sensors
if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
self._attr_available = False
self._dewpoint = None
self._crit_temp = None
return
# re-calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
if self._attr_native_value is None:
self._attr_available = False
self._dewpoint = None
self._crit_temp = None
else:
self._attr_available = True
def _calc_dewpoint(self) -> None:
"""Calculate the dewpoint for the indoor air."""
# Use magnus approximation to calculate the dew point

View File

@@ -10,13 +10,9 @@ import httpx
import ollama
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_API_KEY, CONF_URL, Platform
from homeassistant.const import CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
@@ -66,28 +62,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: OllamaConfigEntry) -> bool:
"""Set up Ollama from a config entry."""
settings = {**entry.data, **entry.options}
api_key = settings.get(CONF_API_KEY)
stripped_api_key = api_key.strip() if isinstance(api_key, str) else None
client = ollama.AsyncClient(
host=settings[CONF_URL],
headers=(
{"Authorization": f"Bearer {stripped_api_key}"}
if stripped_api_key
else None
),
verify=get_default_context(),
)
client = ollama.AsyncClient(host=settings[CONF_URL], verify=get_default_context())
try:
async with asyncio.timeout(DEFAULT_TIMEOUT):
await client.list()
except ollama.ResponseError as err:
if err.status_code in (401, 403):
raise ConfigEntryAuthFailed from err
if err.status_code >= 500 or err.status_code == 429:
raise ConfigEntryNotReady(err) from err
# If the response is a 4xx error other than 401 or 403, it likely means the URL is valid but not an Ollama instance,
# so we raise ConfigEntryError to show an error in the UI, instead of ConfigEntryNotReady which would just keep retrying.
raise ConfigEntryError(err) from err
except (TimeoutError, httpx.ConnectError) as err:
raise ConfigEntryNotReady(err) from err

View File

@@ -20,7 +20,7 @@ from homeassistant.config_entries import (
ConfigSubentryFlow,
SubentryFlowResult,
)
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME, CONF_URL
from homeassistant.const import CONF_LLM_HASS_API, CONF_NAME, CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, llm
from homeassistant.helpers.selector import (
@@ -68,17 +68,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_URL): TextSelector(
TextSelectorConfig(type=TextSelectorType.URL)
),
vol.Optional(CONF_API_KEY): TextSelector(
TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
},
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{
vol.Optional(CONF_API_KEY): TextSelector(
TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
}
)
@@ -89,40 +78,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 3
MINOR_VERSION = 3
async def _async_validate_connection(
    self, url: str, api_key: str | None
) -> dict[str, str]:
    """Validate connection and credentials against the Ollama server.

    Returns a form-errors dict; empty means the server responded and any
    provided API key was accepted.
    """
    errors: dict[str, str] = {}
    try:
        client = ollama.AsyncClient(
            host=url,
            # Only attach an Authorization header when an API key is set.
            headers={"Authorization": f"Bearer {api_key}"} if api_key else None,
            verify=get_default_context(),
        )
        async with asyncio.timeout(DEFAULT_TIMEOUT):
            await client.list()
    except ollama.ResponseError as err:
        if err.status_code in (401, 403):
            errors["base"] = "invalid_auth"
        else:
            _LOGGER.warning(
                "Error response from Ollama server at %s: status %s, detail: %s",
                url,
                err.status_code,
                str(err),
            )
            errors["base"] = "unknown"
    # Multiple exception types must be a parenthesized tuple in Python 3;
    # the bare comma form (`except A, B:`) is a SyntaxError.
    except (TimeoutError, httpx.ConnectError):
        errors["base"] = "cannot_connect"
    except Exception:
        _LOGGER.exception("Unexpected exception")
        errors["base"] = "unknown"
    return errors
def __init__(self) -> None:
    """Initialize config flow."""
    # Ollama server URL being configured; populated during the user step.
    self.url: str | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -134,10 +92,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
)
errors = {}
url = user_input[CONF_URL].strip()
api_key = user_input.get(CONF_API_KEY)
if api_key:
api_key = api_key.strip()
url = user_input[CONF_URL]
self._async_abort_entries_match({CONF_URL: url})
try:
url = cv.url(url)
@@ -151,8 +108,15 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
self._async_abort_entries_match({CONF_URL: url})
errors = await self._async_validate_connection(url, api_key)
try:
client = ollama.AsyncClient(host=url, verify=get_default_context())
async with asyncio.timeout(DEFAULT_TIMEOUT):
await client.list()
except TimeoutError, httpx.ConnectError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if errors:
return self.async_show_form(
@@ -163,65 +127,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
entry_data: dict[str, str] = {CONF_URL: url}
if api_key:
entry_data[CONF_API_KEY] = api_key
return self.async_create_entry(title=url, data=entry_data)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication when existing credentials are invalid."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauthentication confirmation."""
reauth_entry = self._get_reauth_entry()
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_REAUTH_DATA_SCHEMA,
)
api_key = user_input.get(CONF_API_KEY)
if api_key:
api_key = api_key.strip()
errors = await self._async_validate_connection(
reauth_entry.data[CONF_URL], api_key
)
if errors:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=self.add_suggested_values_to_schema(
STEP_REAUTH_DATA_SCHEMA, user_input
),
errors=errors,
)
updated_data = {
**reauth_entry.data,
CONF_URL: reauth_entry.data[CONF_URL],
}
if api_key:
updated_data[CONF_API_KEY] = api_key
else:
updated_data.pop(CONF_API_KEY, None)
updated_options = {
key: value
for key, value in reauth_entry.options.items()
if key != CONF_API_KEY
}
return self.async_update_reload_and_abort(
reauth_entry,
data=updated_data,
options=updated_options,
return self.async_create_entry(
title=url,
data={CONF_URL: url},
)
@classmethod

View File

@@ -1,26 +1,16 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_url": "[%key:common::config_flow::error::invalid_host%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"description": "The Ollama integration needs to re-authenticate with your Ollama API key.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"url": "[%key:common::config_flow::data::url%]"
}
}

View File

@@ -17,7 +17,6 @@ from onvif.client import (
from onvif.exceptions import ONVIFError
from onvif.util import stringify_onvif_error
import onvif_parsers
import onvif_parsers.util
from zeep.exceptions import Fault, TransportError, ValidationError, XMLParseError
from homeassistant.components import webhook
@@ -197,7 +196,7 @@ class EventManager:
topic = msg.Topic._value_1.rstrip("/.") # noqa: SLF001
try:
events = await onvif_parsers.parse(topic, unique_id, msg)
event = await onvif_parsers.parse(topic, unique_id, msg)
error = None
except onvif_parsers.errors.UnknownTopicError:
if topic not in UNHANDLED_TOPICS:
@@ -205,43 +204,42 @@ class EventManager:
"%s: No registered handler for event from %s: %s",
self.name,
unique_id,
onvif_parsers.util.event_to_debug_format(msg),
msg,
)
UNHANDLED_TOPICS.add(topic)
continue
except (AttributeError, KeyError) as e:
events = []
event = None
error = e
if not events:
if not event:
LOGGER.warning(
"%s: Unable to parse event from %s: %s: %s",
self.name,
unique_id,
error,
onvif_parsers.util.event_to_debug_format(msg),
msg,
)
continue
for event in events:
value = event.value
if event.device_class == "timestamp" and isinstance(value, str):
value = _local_datetime_or_none(value)
value = event.value
if event.device_class == "timestamp" and isinstance(value, str):
value = _local_datetime_or_none(value)
ha_event = Event(
uid=event.uid,
name=event.name,
platform=event.platform,
device_class=event.device_class,
unit_of_measurement=event.unit_of_measurement,
value=value,
entity_category=ENTITY_CATEGORY_MAPPING.get(
event.entity_category or ""
),
entity_enabled=event.entity_enabled,
)
self.get_uids_by_platform(ha_event.platform).add(ha_event.uid)
self._events[ha_event.uid] = ha_event
ha_event = Event(
uid=event.uid,
name=event.name,
platform=event.platform,
device_class=event.device_class,
unit_of_measurement=event.unit_of_measurement,
value=value,
entity_category=ENTITY_CATEGORY_MAPPING.get(
event.entity_category or ""
),
entity_enabled=event.entity_enabled,
)
self.get_uids_by_platform(ha_event.platform).add(ha_event.uid)
self._events[ha_event.uid] = ha_event
def get_uid(self, uid: str) -> Event | None:
"""Retrieve event for given id."""

View File

@@ -15,7 +15,7 @@
"loggers": ["onvif", "wsdiscovery", "zeep"],
"requirements": [
"onvif-zeep-async==4.0.4",
"onvif_parsers==2.3.0",
"onvif_parsers==1.2.2",
"WSDiscovery==2.1.2"
]
}

View File

@@ -4,7 +4,6 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/orvibo",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["orvibo"],
"quality_scale": "legacy",

View File

@@ -20,7 +20,7 @@ from homeassistant.const import (
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType
@@ -37,7 +37,6 @@ DEFAULT_SEND_DELAY = 0.0
DOMAIN = "pilight"
EVENT = "pilight_received"
type EVENT_TYPE = Event[dict[str, Any]]
# The Pilight code schema depends on the protocol. Thus only require to have
# the protocol information. Ensure that protocol is in a list otherwise

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import datetime
from typing import Any
import voluptuous as vol
@@ -25,7 +24,7 @@ from homeassistant.helpers.event import track_point_in_time
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util
from . import EVENT, EVENT_TYPE
from . import EVENT
CONF_VARIABLE = "variable"
CONF_RESET_DELAY_SEC = "reset_delay_sec"
@@ -47,8 +46,6 @@ PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
}
)
type _PAYLOAD_SET_TYPE = str | int | float
def setup_platform(
hass: HomeAssistant,
@@ -62,12 +59,12 @@ def setup_platform(
[
PilightTriggerSensor(
hass=hass,
name=config[CONF_NAME],
variable=config[CONF_VARIABLE],
payload=config[CONF_PAYLOAD],
on_value=config[CONF_PAYLOAD_ON],
off_value=config[CONF_PAYLOAD_OFF],
rst_dly_sec=config[CONF_RESET_DELAY_SEC],
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
rst_dly_sec=config.get(CONF_RESET_DELAY_SEC),
)
]
)
@@ -76,11 +73,11 @@ def setup_platform(
[
PilightBinarySensor(
hass=hass,
name=config[CONF_NAME],
variable=config[CONF_VARIABLE],
payload=config[CONF_PAYLOAD],
on_value=config[CONF_PAYLOAD_ON],
off_value=config[CONF_PAYLOAD_OFF],
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
)
]
)
@@ -89,15 +86,7 @@ def setup_platform(
class PilightBinarySensor(BinarySensorEntity):
"""Representation of a binary sensor that can be updated using Pilight."""
def __init__(
self,
hass: HomeAssistant,
name: str,
variable: str,
payload: dict[str, Any],
on_value: _PAYLOAD_SET_TYPE,
off_value: _PAYLOAD_SET_TYPE,
) -> None:
def __init__(self, hass, name, variable, payload, on_value, off_value):
"""Initialize the sensor."""
self._attr_is_on = False
self._hass = hass
@@ -109,7 +98,7 @@ class PilightBinarySensor(BinarySensorEntity):
hass.bus.listen(EVENT, self._handle_code)
def _handle_code(self, call: EVENT_TYPE) -> None:
def _handle_code(self, call):
"""Handle received code by the pilight-daemon.
If the code matches the defined payload
@@ -137,15 +126,8 @@ class PilightTriggerSensor(BinarySensorEntity):
"""Representation of a binary sensor that can be updated using Pilight."""
def __init__(
self,
hass: HomeAssistant,
name: str,
variable: str,
payload: dict[str, Any],
on_value: _PAYLOAD_SET_TYPE,
off_value: _PAYLOAD_SET_TYPE,
rst_dly_sec: int,
) -> None:
self, hass, name, variable, payload, on_value, off_value, rst_dly_sec=30
):
"""Initialize the sensor."""
self._attr_is_on = False
self._hass = hass
@@ -155,17 +137,17 @@ class PilightTriggerSensor(BinarySensorEntity):
self._on_value = on_value
self._off_value = off_value
self._reset_delay_sec = rst_dly_sec
self._delay_after: datetime.datetime | None = None
self._delay_after = None
self._hass = hass
hass.bus.listen(EVENT, self._handle_code)
def _reset_state(self, _: datetime.datetime) -> None:
def _reset_state(self, call):
self._attr_is_on = False
self._delay_after = None
self.schedule_update_ha_state()
def _handle_code(self, call: EVENT_TYPE) -> None:
def _handle_code(self, call):
"""Handle received code by the pilight-daemon.
If the code matches the defined payload

View File

@@ -1,7 +1,5 @@
"""Base class for pilight."""
from typing import Any
import voluptuous as vol
from homeassistant.const import (
@@ -12,10 +10,8 @@ from homeassistant.const import (
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN, EVENT, SERVICE_NAME
from .const import (
@@ -64,19 +60,19 @@ class PilightBaseDevice(RestoreEntity):
_attr_assumed_state = True
_attr_should_poll = False
def __init__(self, hass: HomeAssistant, name: str, config: ConfigType) -> None:
def __init__(self, hass, name, config):
"""Initialize a device."""
self._hass = hass
self._attr_name = config.get(CONF_NAME, name)
self._attr_is_on: bool | None = False
self._attr_is_on = False
self._code_on = config.get(CONF_ON_CODE)
self._code_off = config.get(CONF_OFF_CODE)
code_on_receive = config.get(CONF_ON_CODE_RECEIVE, [])
code_off_receive = config.get(CONF_OFF_CODE_RECEIVE, [])
self._code_on_receive: list[_ReceiveHandle] = []
self._code_off_receive: list[_ReceiveHandle] = []
self._code_on_receive = []
self._code_off_receive = []
for code_list, conf in (
(self._code_on_receive, code_on_receive),
@@ -89,7 +85,7 @@ class PilightBaseDevice(RestoreEntity):
if any(self._code_on_receive) or any(self._code_off_receive):
hass.bus.listen(EVENT, self._handle_code)
self._brightness: int | None = 255
self._brightness = 255
async def async_added_to_hass(self) -> None:
"""Call when entity about to be added to hass."""
@@ -151,18 +147,18 @@ class PilightBaseDevice(RestoreEntity):
class _ReceiveHandle:
def __init__(self, config: dict[str, Any], echo: bool) -> None:
def __init__(self, config, echo):
"""Initialize the handle."""
self.config_items = config.items()
self.echo = echo
def match(self, code: dict[str, Any]) -> bool:
def match(self, code):
"""Test if the received code matches the configured values.
The received values have to be a subset of the configured options.
"""
return self.config_items <= code.items()
def run(self, switch: PilightBaseDevice, turn_on: bool) -> None:
def run(self, switch, turn_on):
"""Change the state of the switch."""
switch.set_state(turn_on=turn_on, send_code=self.echo)

Some files were not shown because too many files have changed in this diff Show More