Compare commits


2 Commits

Author  SHA1        Message                              Date
epenet  478ec2b377  fix                                  2025-02-11 15:27:42 +00:00
epenet  a66c3218f5  Do not test internals in flo tests   2025-02-11 15:15:51 +00:00
1429 changed files with 15557 additions and 89188 deletions
Binary image file changed (not shown): 99 KiB before, 65 KiB after.

-100
@@ -1,100 +0,0 @@
# Instructions for GitHub Copilot
This repository holds the core of Home Assistant, a Python 3 based home
automation application.
- Python code must be compatible with Python 3.13
- Use the newest Python language features if possible:
- Pattern matching
- Type hints
- f-strings for string formatting over `%` or `.format()`
- Dataclasses
- Walrus operator
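- For example, a short sketch combining pattern matching, dataclasses, the walrus operator, and f-strings (the `Reading` class and `describe` function are illustrative only, not part of the codebase):
```python
from dataclasses import dataclass


@dataclass
class Reading:
    """A single sensor reading (illustrative only)."""

    sensor: str
    value: float | None


def describe(reading: Reading) -> str:
    """Format a reading using pattern matching, the walrus operator, and f-strings."""
    match reading:
        case Reading(sensor=name, value=None):
            return f"{name}: unknown"
        case Reading(sensor=name, value=value) if (rounded := round(value, 1)) >= 0:
            return f"{name}: {rounded}"
        case Reading(sensor=name, value=value):
            return f"{name}: below zero ({value})"
        case _:
            return "invalid reading"
```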
- Code quality tools:
- Formatting: Ruff
- Linting: PyLint and Ruff
- Type checking: MyPy
- Testing: pytest with plain functions and fixtures
- Inline code documentation:
- File headers should be short and concise:
```python
"""Integration for Peblar EV chargers."""
```
- Every method and function needs a docstring:
```python
async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool:
"""Set up Peblar from a config entry."""
...
```
- All code, comments, and other text are written in American English
- Follow existing code style patterns as much as possible
- Core locations:
- Shared constants: `homeassistant/const.py`, use them instead of hardcoding
strings or creating duplicate integration constants.
- Integration files:
- Constants: `homeassistant/components/{domain}/const.py`
- Models: `homeassistant/components/{domain}/models.py`
- Coordinator: `homeassistant/components/{domain}/coordinator.py`
- Config flow: `homeassistant/components/{domain}/config_flow.py`
- Platform code: `homeassistant/components/{domain}/{platform}.py`
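- For example, import shared constants rather than redefining them (the `.const` names here are hypothetical):
```python
from homeassistant.const import CONF_HOST, CONF_PASSWORD  # shared constants

from .const import DOMAIN, DEFAULT_SCAN_INTERVAL  # integration constants (hypothetical names)
```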
- All external I/O operations must be async
- Async patterns:
- Avoid sleeping in loops
- Avoid awaiting in loops, gather instead
- No blocking calls
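- A minimal sketch of gathering awaitables instead of awaiting in a loop (`client.async_refresh` is a placeholder method, not a real API):
```python
import asyncio


async def async_refresh_all(clients: list) -> None:
    """Refresh every client concurrently instead of awaiting each one in a loop."""
    await asyncio.gather(*(client.async_refresh() for client in clients))
```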
- Polling:
- Follow update coordinator pattern, when possible
- Polling interval may not be configurable by the user
- For local network polling, the minimum interval is 5 seconds
- For cloud polling, the minimum interval is 60 seconds
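- A minimal coordinator sketch, assuming a local device polled every 30 seconds (the `client` object and its `async_get_data` method are hypothetical):
```python
from datetime import timedelta
import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator[dict]):
    """Coordinator polling a local device every 30 seconds."""

    def __init__(self, hass: HomeAssistant, client) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="example",
            update_interval=timedelta(seconds=30),
        )
        self.client = client

    async def _async_update_data(self) -> dict:
        """Fetch the latest data from the device."""
        try:
            return await self.client.async_get_data()
        except OSError as err:
            raise UpdateFailed(f"Error communicating with device: {err}") from err
```
Entities can then inherit from `CoordinatorEntity` so the device is polled once per interval regardless of how many entities consume the data.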
- Error handling:
- Use specific exceptions from `homeassistant.exceptions`
- Setup failures:
- Temporary: Raise `ConfigEntryNotReady`
- Permanent: Use `ConfigEntryError`
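- A sketch showing both failure modes (`ExampleClient` and its exceptions are hypothetical library names):
```python
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the integration from a config entry."""
    client = ExampleClient(entry.data["host"])  # hypothetical library client
    try:
        await client.async_connect()
    except ExampleConnectionError as err:  # hypothetical: device unreachable
        # Temporary failure: Home Assistant retries the setup later
        raise ConfigEntryNotReady(f"Cannot connect to {entry.data['host']}") from err
    except ExampleAuthError as err:  # hypothetical: bad credentials
        # Permanent failure: setup is not retried
        raise ConfigEntryError("Invalid credentials") from err
    return True
```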
- Logging:
- Message format:
- No periods at end
- No integration names or domains (added automatically)
- No sensitive data (keys, tokens, passwords), even when those are incorrect.
- Be very restrictive with info-level log messages; use debug for
  anything that is not targeting the user.
- Use lazy logging (no f-strings):
```python
_LOGGER.debug("This is a log message with %s", variable)
```
- Entities:
- Ensure unique IDs for state persistence:
- Unique IDs should not contain values that are subject to user or network change.
- An ID needs to be unique per platform, not per integration.
- The ID does not have to contain the integration domain or platform.
- Acceptable examples:
- Serial number of a device
- MAC address of a device formatted using `homeassistant.helpers.device_registry.format_mac`
Do not obtain the MAC address through the ARP cache or local network access;
only use the MAC address provided by discovery or the device itself.
- Unique identifier that is physically printed on the device or burned into an EEPROM
- Not acceptable examples:
- IP Address
- Device name
- Hostname
- URL
- Email address
- Username
- For entities that are set up by a config entry, the config entry ID
  can be used as a last resort if no other unique ID is available.
For example: `f"{entry.entry_id}-battery"`
- If the state value is unknown, use `None`
- Do not use the `unavailable` string as a state value,
implement the `available()` property method instead
- Do not use the `unknown` string as a state value, use `None` instead
- Extra entity state attributes:
- The keys of all state attributes should always be present
- If the value is unknown, use `None`
- Provide descriptive state attributes
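- A sketch tying these entity rules together (the device object and its attributes are hypothetical):
```python
from homeassistant.components.sensor import SensorEntity


class ExampleBatterySensor(SensorEntity):
    """Battery sensor keyed by the device serial number."""

    def __init__(self, serial_number: str, device) -> None:
        """Initialize the sensor."""
        # Unique per platform; no domain or platform prefix, no IP or hostname.
        self._attr_unique_id = f"{serial_number}-battery"
        self._device = device

    @property
    def available(self) -> bool:
        """Return True when the device is reachable, instead of an 'unavailable' state."""
        return self._device.online

    @property
    def native_value(self) -> int | None:
        """Return the battery level, or None when it is unknown."""
        return self._device.battery_level

    @property
    def extra_state_attributes(self) -> dict[str, int | None]:
        """Return extra attributes; keys are always present, unknown values are None."""
        return {"charge_cycles": self._device.charge_cycles}
```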
- Testing:
- Test location: `tests/components/{domain}/`
- Use pytest fixtures from `tests.common`
- Mock external dependencies
- Use snapshots for complex data
- Follow existing test patterns
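- A sketch of such a test, assuming a hypothetical `example` integration whose external client is mocked:
```python
"""Test setting up the hypothetical example integration."""

from unittest.mock import AsyncMock, patch

import pytest

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return a mocked config entry for the hypothetical example domain."""
    return MockConfigEntry(domain="example", data={"host": "127.0.0.1"})


async def test_setup_entry(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Test the config entry is set up with the external client mocked."""
    mock_config_entry.add_to_hass(hass)
    with patch("homeassistant.components.example.ExampleClient") as mock_client:
        mock_client.return_value.async_connect = AsyncMock()
        assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()
```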
+16 -16
@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: translations
path: translations.tar.gz
@@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: translations
@@ -197,7 +197,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@@ -263,7 +263,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2025.02.0
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.8.0
with:
cosign-release: "v2.2.3"
@@ -448,9 +448,6 @@ jobs:
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
@@ -462,7 +459,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: translations
@@ -476,13 +473,16 @@ jobs:
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install build
pip install twine build
python -m build
- name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
skip-existing: true
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
hassfest-image:
name: Build and test hassfest image
@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@ca877d9245402d1537745e0e356eab47c3520991 # v6.13.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
uses: docker/build-push-action@ca877d9245402d1537745e0e356eab47c3520991 # v6.13.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
+36 -36
@@ -240,7 +240,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.0
with:
path: venv
key: >-
@@ -256,7 +256,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -286,7 +286,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -366,7 +366,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -482,7 +482,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.0
with:
path: venv
key: >-
@@ -490,7 +490,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.0
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -537,7 +537,7 @@ jobs:
python --version
uv pip freeze >> pip_freeze.txt
- name: Upload pip_freeze artifact
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pip-freeze-${{ matrix.python-version }}
path: pip_freeze.txt
@@ -578,7 +578,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -611,7 +611,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -649,7 +649,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -661,7 +661,7 @@ jobs:
. venv/bin/activate
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
- name: Upload licenses
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
path: licenses-${{ matrix.python-version }}.json
@@ -692,7 +692,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -739,7 +739,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -791,7 +791,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -799,7 +799,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.1
uses: actions/cache@v4.2.0
with:
path: .mypy_cache
key: >-
@@ -865,7 +865,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -877,7 +877,7 @@ jobs:
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pytest_buckets
path: pytest_buckets.txt
@@ -929,7 +929,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -942,7 +942,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: pytest_buckets
- name: Compile English translations
@@ -980,14 +980,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@@ -1051,7 +1051,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -1108,7 +1108,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@@ -1116,7 +1116,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@@ -1181,7 +1181,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -1239,7 +1239,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@@ -1247,7 +1247,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@@ -1271,7 +1271,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
pattern: coverage-*
- name: Upload coverage to Codecov
@@ -1328,7 +1328,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.1
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
@@ -1382,14 +1382,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@@ -1410,7 +1410,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
pattern: coverage-*
- name: Upload coverage to Codecov
+2 -2
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.10
uses: github/codeql-action/init@v3.28.9
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.10
uses: github/codeql-action/analyze@v3.28.9
with:
category: "/language:python"
+49 -13
@@ -91,7 +91,7 @@ jobs:
) > build_constraints.txt
- name: Upload env_file
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: env_file
path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
overwrite: true
- name: Upload build_constraints
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.6.1
uses: actions/upload-artifact@v4.6.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: requirements_diff
@@ -187,22 +187,22 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.1.9
uses: actions/download-artifact@v4.1.8
with:
name: requirements_all_wheels
@@ -218,7 +218,15 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
- name: Split requirements all
run: |
# We split requirements all into multiple files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
@@ -230,4 +238,32 @@ jobs:
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txt"
requirements: "requirements_all.txtaa"
- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"
+1 -1
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.7
rev: v0.9.1
hooks:
- id: ruff
args:
-4
@@ -103,7 +103,6 @@ homeassistant.components.auth.*
homeassistant.components.automation.*
homeassistant.components.awair.*
homeassistant.components.axis.*
homeassistant.components.azure_storage.*
homeassistant.components.backup.*
homeassistant.components.baf.*
homeassistant.components.bang_olufsen.*
@@ -235,7 +234,6 @@ homeassistant.components.here_travel_time.*
homeassistant.components.history.*
homeassistant.components.history_stats.*
homeassistant.components.holiday.*
homeassistant.components.home_connect.*
homeassistant.components.homeassistant.*
homeassistant.components.homeassistant_alerts.*
homeassistant.components.homeassistant_green.*
@@ -408,7 +406,6 @@ homeassistant.components.raspberry_pi.*
homeassistant.components.rdw.*
homeassistant.components.recollect_waste.*
homeassistant.components.recorder.*
homeassistant.components.remember_the_milk.*
homeassistant.components.remote.*
homeassistant.components.renault.*
homeassistant.components.reolink.*
@@ -440,7 +437,6 @@ homeassistant.components.select.*
homeassistant.components.sensibo.*
homeassistant.components.sensirion_ble.*
homeassistant.components.sensor.*
homeassistant.components.sensorpush_cloud.*
homeassistant.components.sensoterra.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
+2 -9
@@ -38,17 +38,10 @@
"module": "pytest",
"justMyCode": false,
"args": [
"--timeout=10",
"--picked"
],
},
{
"name": "Home Assistant: Debug Current Test File",
"type": "debugpy",
"request": "launch",
"module": "pytest",
"console": "integratedTerminal",
"args": ["-vv", "${file}"]
},
{
// Debug by attaching to local Home Assistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
@@ -84,4 +77,4 @@
]
}
]
}
}
+6 -16
@@ -180,8 +180,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/azure_event_hub/ @eavanvalkenburg
/tests/components/azure_event_hub/ @eavanvalkenburg
/homeassistant/components/azure_service_bus/ @hfurubotten
/homeassistant/components/azure_storage/ @zweckj
/tests/components/azure_storage/ @zweckj
/homeassistant/components/backup/ @home-assistant/core
/tests/components/backup/ @home-assistant/core
/homeassistant/components/baf/ @bdraco @jfroy
@@ -969,8 +967,8 @@ build.json @home-assistant/supervisor
/tests/components/motionblinds_ble/ @LennP @jerrybboy
/homeassistant/components/motioneye/ @dermotduffy
/tests/components/motioneye/ @dermotduffy
/homeassistant/components/motionmount/ @laiho-vogels
/tests/components/motionmount/ @laiho-vogels
/homeassistant/components/motionmount/ @RJPoelstra
/tests/components/motionmount/ @RJPoelstra
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
/homeassistant/components/msteams/ @peroyvind
@@ -1053,8 +1051,8 @@ build.json @home-assistant/supervisor
/tests/components/numato/ @clssn
/homeassistant/components/number/ @home-assistant/core @Shulyaka
/tests/components/number/ @home-assistant/core @Shulyaka
/homeassistant/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
/tests/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
/homeassistant/components/nut/ @bdraco @ollo69 @pestevez
/tests/components/nut/ @bdraco @ollo69 @pestevez
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
/tests/components/nws/ @MatthewFlamm @kamiyo
/homeassistant/components/nyt_games/ @joostlek
@@ -1146,8 +1144,8 @@ build.json @home-assistant/supervisor
/tests/components/philips_js/ @elupus
/homeassistant/components/pi_hole/ @shenxn
/tests/components/pi_hole/ @shenxn
/homeassistant/components/picnic/ @corneyl @codesalatdev
/tests/components/picnic/ @corneyl @codesalatdev
/homeassistant/components/picnic/ @corneyl
/tests/components/picnic/ @corneyl
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
@@ -1344,8 +1342,6 @@ build.json @home-assistant/supervisor
/tests/components/sensorpro/ @bdraco
/homeassistant/components/sensorpush/ @bdraco
/tests/components/sensorpush/ @bdraco
/homeassistant/components/sensorpush_cloud/ @sstallion
/tests/components/sensorpush_cloud/ @sstallion
/homeassistant/components/sensoterra/ @markruys
/tests/components/sensoterra/ @markruys
/homeassistant/components/sentry/ @dcramer @frenck
@@ -1401,8 +1397,6 @@ build.json @home-assistant/supervisor
/tests/components/smappee/ @bsmappee
/homeassistant/components/smart_meter_texas/ @grahamwetzler
/tests/components/smart_meter_texas/ @grahamwetzler
/homeassistant/components/smartthings/ @joostlek
/tests/components/smartthings/ @joostlek
/homeassistant/components/smarttub/ @mdz
/tests/components/smarttub/ @mdz
/homeassistant/components/smarty/ @z0mbieprocess
@@ -1417,8 +1411,6 @@ build.json @home-assistant/supervisor
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
/tests/components/snmp/ @nmaggioni
/homeassistant/components/snoo/ @Lash-L
/tests/components/snoo/ @Lash-L
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck @bdraco
@@ -1699,8 +1691,6 @@ build.json @home-assistant/supervisor
/tests/components/weatherflow_cloud/ @jeeftor
/homeassistant/components/weatherkit/ @tjhorner
/tests/components/weatherkit/ @tjhorner
/homeassistant/components/webdav/ @jpbede
/tests/components/webdav/ @jpbede
/homeassistant/components/webhook/ @home-assistant/core
/tests/components/webhook/ @home-assistant/core
/homeassistant/components/webmin/ @autinerd
+19 -19
@@ -12,26 +12,8 @@ ENV \
ARG QEMU_CPU
# Home Assistant S6-Overlay
COPY rootfs /
# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
"armhf") go2rtc_suffix='armv6' ;; \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.6.1
RUN pip3 install uv==0.5.27
WORKDIR /usr/src
@@ -60,4 +42,22 @@ RUN \
&& python3 -m compileall \
homeassistant/homeassistant
# Home Assistant S6-Overlay
COPY rootfs /
# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
"armhf") go2rtc_suffix='armv6' ;; \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
WORKDIR /config
+5 -5
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.02.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
+115 -100
@@ -74,7 +74,6 @@ from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
backup,
category_registry,
config_validation as cv,
device_registry,
@@ -135,12 +134,14 @@ DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1
STAGE_0_SUBSTAGE_TIMEOUT = 60
STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60
DEBUGGER_INTEGRATIONS = {"debugpy"}
# Core integrations are unconditionally loaded
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
@@ -151,10 +152,6 @@ LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
"isal",
# Set log levels
"logger",
# Ensure network config is available
# before hassio or any other integration is
# loaded that might create an aiohttp client session
"network",
# Error logging
"system_log",
"sentry",
@@ -164,28 +161,23 @@ FRONTEND_INTEGRATIONS = {
# integrations can be removed and database migration status is
# visible in frontend
"frontend",
# Hassio is an after dependency of backup, after dependencies
# are not promoted from stage 2 to earlier stages, so we need to
# add it here. Hassio needs to be setup before backup, otherwise
# the backup integration will think we are a container/core install
# when using HAOS or Supervised install.
"hassio",
# Backup is an after dependency of frontend, after dependencies
# are not promoted from stage 2 to earlier stages, so we need to
# add it here.
"backup",
}
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
# The substage containing recorder should have no timeout, as it could cancel a database migration.
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
# The substages preceding it should also have no timeout, until we ensure that the recorder
# is not accidentally promoted as a dependency of any of the integrations in them.
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS, None),
# Setup recorder
("recorder", {"recorder"}, None),
# Start up debuggers. Start these first in case they want to wait.
("debugger", {"debugpy"}, STAGE_0_SUBSTAGE_TIMEOUT),
# Zeroconf is used for mdns resolution in aiohttp client helper.
("zeroconf", {"zeroconf"}, STAGE_0_SUBSTAGE_TIMEOUT),
)
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb")
# Stage 1 integrations are not to be preimported in bootstrap.
RECORDER_INTEGRATIONS = {
# Setup after frontend
# To record data
"recorder",
}
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
STAGE_1_INTEGRATIONS = {
# We need to make sure discovery integrations
# update their deps before stage 2 integrations
@@ -200,7 +192,6 @@ STAGE_1_INTEGRATIONS = {
# Ensure supervisor is available
"hassio",
}
DEFAULT_INTEGRATIONS = {
# These integrations are set up unless recovery mode is activated.
#
@@ -241,12 +232,22 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
# These integrations are set up if using the Supervisor
"hassio",
}
CRITICAL_INTEGRATIONS = {
# Recovery mode is activated if these integrations fail to set up
"frontend",
}
SETUP_ORDER = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS),
# Setup recorder
("recorder", RECORDER_INTEGRATIONS),
# Start up debuggers. Start these first in case they want to wait.
("debugger", DEBUGGER_INTEGRATIONS),
)
#
# Storage keys we are likely to load during startup
# in order of when we expect to load them.
@@ -321,10 +322,10 @@ async def async_setup_hass(
block_async_io.enable()
if not (recovery_mode := runtime_config.recovery_mode):
config_dict = None
basic_setup_success = False
config_dict = None
basic_setup_success = False
if not (recovery_mode := runtime_config.recovery_mode):
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
try:
@@ -342,43 +343,39 @@ async def async_setup_hass(
await async_from_config_dict(config_dict, hass) is not None
)
if config_dict is None:
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
if config_dict is None:
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
elif not basic_setup_success:
_LOGGER.warning(
"Unable to set up core integrations. Activating recovery mode"
)
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
elif not basic_setup_success:
_LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
elif any(
domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS
):
_LOGGER.warning(
"Detected that %s did not load. Activating recovery mode",
",".join(CRITICAL_INTEGRATIONS),
)
elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
_LOGGER.warning(
"Detected that %s did not load. Activating recovery mode",
",".join(CRITICAL_INTEGRATIONS),
)
old_config = hass.config
old_logging = hass.data.get(DATA_LOGGING)
old_config = hass.config
old_logging = hass.data.get(DATA_LOGGING)
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
if old_logging:
hass.data[DATA_LOGGING] = old_logging
hass.config.debug = old_config.debug
hass.config.skip_pip = old_config.skip_pip
hass.config.skip_pip_packages = old_config.skip_pip_packages
hass.config.internal_url = old_config.internal_url
hass.config.external_url = old_config.external_url
# Setup loader cache after the config dir has been set
loader.async_setup(hass)
if old_logging:
hass.data[DATA_LOGGING] = old_logging
hass.config.debug = old_config.debug
hass.config.skip_pip = old_config.skip_pip
hass.config.skip_pip_packages = old_config.skip_pip_packages
hass.config.internal_url = old_config.internal_url
hass.config.external_url = old_config.external_url
# Setup loader cache after the config dir has been set
loader.async_setup(hass)
if recovery_mode:
_LOGGER.info("Starting in recovery mode")
@@ -697,6 +694,7 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
return deps_dir
@core.callback
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# Filter out the repeating and common config section [homeassistant]
@@ -892,52 +890,69 @@ async def _async_set_up_integrations(
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
hass, config
)
stage_2_domains = domains_to_setup.copy()
# Initialize recorder
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)
# Initialize backup
if "backup" in domains_to_setup:
backup.async_initialize_backup(hass)
stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group & domains_to_setup, timeout)
for name, domain_group, timeout in STAGE_0_INTEGRATIONS
),
("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT),
pre_stage_domains = [
(name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
]
_LOGGER.info("Setting up stage 0 and 1")
for name, domain_group, timeout in stage_0_and_1_domains:
if not domain_group:
continue
# calculate what components to setup in what stage
stage_1_domains: set[str] = set()
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
stage_2_domains -= to_be_loaded
# Find all dependencies of any dependency of any stage 1 integration that
# we plan on loading and promote them to stage 1. This is done only to not
# get misleading log messages
deps_promotion: set[str] = STAGE_1_INTEGRATIONS
while deps_promotion:
old_deps_promotion = deps_promotion
deps_promotion = set()
if timeout is None:
for domain in old_deps_promotion:
if domain not in domains_to_setup or domain in stage_1_domains:
continue
stage_1_domains.add(domain)
if (dep_itg := integration_cache.get(domain)) is None:
continue
deps_promotion.update(dep_itg.all_dependencies)
stage_2_domains = domains_to_setup - stage_1_domains
for name, domain_group in pre_stage_domains:
if domain_group:
stage_2_domains -= domain_group
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
await _async_setup_multi_components(hass, domain_group, config)
else:
try:
async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
await _async_setup_multi_components(hass, domain_group, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for %s waiting on %s - moving forward",
name,
hass._active_tasks, # noqa: SLF001
)
# Enables after dependencies when setting up stage 1 domains
async_set_domains_to_be_loaded(hass, stage_1_domains)
# Start setup
if stage_1_domains:
_LOGGER.info("Setting up stage 1: %s", stage_1_domains)
try:
async with hass.timeout.async_timeout(
STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
):
await _async_setup_multi_components(hass, stage_1_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 1 waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
)
# Add after dependencies when setting up stage 2 domains
async_set_domains_to_be_loaded(hass, stage_2_domains)
-1
@@ -6,7 +6,6 @@
"azure_devops",
"azure_event_hub",
"azure_service_bus",
"azure_storage",
"microsoft_face_detect",
"microsoft_face_identify",
"microsoft_face",
-5
@@ -1,5 +0,0 @@
{
"domain": "sensorpush",
"name": "SensorPush",
"integrations": ["sensorpush", "sensorpush_cloud"]
}
@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.1.0"],
"requirements": ["accuweather==4.0.0"],
"single_config_entry": true
}
+7 -2
@@ -7,7 +7,7 @@ from dataclasses import dataclass
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
@@ -123,7 +123,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Unload AdGuard Home config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if not hass.config_entries.async_loaded_entries(DOMAIN):
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# This is the last loaded instance of AdGuard, deregister any services
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.9.2"],
"requirements": ["airgradient==0.9.1"],
"zeroconf": ["_airgradient._tcp.local."]
}
@@ -83,7 +83,6 @@ class AirQConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(device_info["id"])
self._abort_if_unique_id_configured()
_LOGGER.debug("Creating an entry for %s", device_info["name"])
return self.async_create_entry(title=device_info["name"], data=user_input)
return self.async_show_form(
+2 -14
@@ -5,7 +5,7 @@ from __future__ import annotations
from datetime import timedelta
import logging
from aioairq.core import AirQ, identify_warming_up_sensors
from aioairq import AirQ
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
@@ -55,9 +55,6 @@ class AirQCoordinator(DataUpdateCoordinator):
async def _async_update_data(self) -> dict:
"""Fetch the data from the device."""
if "name" not in self.device_info:
_LOGGER.debug(
"'name' not found in AirQCoordinator.device_info, fetching from the device"
)
info = await self.airq.fetch_device_info()
self.device_info.update(
DeviceInfo(
@@ -67,16 +64,7 @@ class AirQCoordinator(DataUpdateCoordinator):
hw_version=info["hw_version"],
)
)
_LOGGER.debug(
"Updated AirQCoordinator.device_info for 'name' %s",
self.device_info.get("name"),
)
data: dict = await self.airq.get_latest_data(
return await self.airq.get_latest_data( # type: ignore[no-any-return]
return_average=self.return_average,
clip_negative_values=self.clip_negative,
)
if warming_up_sensors := identify_warming_up_sensors(data):
_LOGGER.debug(
"Following sensors are still warming up: %s", warming_up_sensors
)
return data
@@ -2,7 +2,7 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
@@ -28,13 +28,11 @@ async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return True
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Remove a config entry."""
if not hass.config_entries.async_loaded_entries(DOMAIN):
if all(
config_entry.state is ConfigEntryState.NOT_LOADED
for config_entry in hass.config_entries.async_entries(DOMAIN)
if config_entry.entry_id != entry.entry_id
):
ir.async_delete_issue(hass, DOMAIN, DOMAIN)
# Remove any remaining disabled or ignored entries
for _entry in hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id))
return True
@@ -90,7 +90,7 @@
},
"alarm_arm_home": {
"name": "Arm home",
"description": "Arms the alarm in the home mode.",
"description": "Sets the alarm to: _armed, but someone is home_.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -100,7 +100,7 @@
},
"alarm_arm_away": {
"name": "Arm away",
"description": "Arms the alarm in the away mode.",
"description": "Sets the alarm to: _armed, no one home_.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -110,7 +110,7 @@
},
"alarm_arm_night": {
"name": "Arm night",
"description": "Arms the alarm in the night mode.",
"description": "Sets the alarm to: _armed for the night_.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -120,7 +120,7 @@
},
"alarm_arm_vacation": {
"name": "Arm vacation",
"description": "Arms the alarm in the vacation mode.",
"description": "Sets the alarm to: _armed for vacation_.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -130,7 +130,7 @@
},
"alarm_trigger": {
"name": "Trigger",
"description": "Triggers the alarm manually.",
"description": "Trigger the alarm manually.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["adext", "alarmdecoder"],
"requirements": ["adext==0.4.4"]
"requirements": ["adext==0.4.3"]
}
+2 -3
@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
)
from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
from homeassistant.exceptions import ServiceNotFound
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import (
async_track_point_in_time,
@@ -195,8 +195,7 @@ class AlertEntity(Entity):
async def async_turn_off(self, **kwargs: Any) -> None:
"""Async Acknowledge alert."""
if not self._can_ack:
raise ServiceValidationError("This alert cannot be acknowledged")
LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
self._ack = True
self.async_write_ha_state()
@@ -239,7 +239,6 @@ SENSOR_DESCRIPTIONS = (
native_unit_of_measurement=DEGREE,
suggested_display_precision=0,
entity_registry_enabled_default=False,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key=TYPE_WINDGUSTMPH,
@@ -608,25 +608,21 @@ SENSOR_DESCRIPTIONS = (
key=TYPE_WINDDIR,
translation_key="wind_direction",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key=TYPE_WINDDIR_AVG10M,
translation_key="wind_direction_average_10m",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key=TYPE_WINDDIR_AVG2M,
translation_key="wind_direction_average_2m",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key=TYPE_WINDGUSTDIR,
translation_key="wind_gust_direction",
native_unit_of_measurement=DEGREE,
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key=TYPE_WINDGUSTMPH,
@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"requirements": ["androidtvremote2==0.2.0"],
"requirements": ["androidtvremote2==0.1.2"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}
@@ -10,7 +10,7 @@
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cannot_receive_deviceinfo": "Failed to retrieve MAC Address. Make sure the device is turned on"
"cannot_receive_deviceinfo": "Failed to retreive MAC Address. Make sure the device is turned on"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
@@ -2,8 +2,6 @@
from __future__ import annotations
from functools import partial
import anthropic
from homeassistant.config_entries import ConfigEntry
@@ -22,9 +20,7 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
"""Set up Anthropic from a config entry."""
client = await hass.async_add_executor_job(
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
)
client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
try:
await client.messages.create(
model="claude-3-haiku-20240307",
@@ -2,7 +2,6 @@
from __future__ import annotations
from functools import partial
import logging
from types import MappingProxyType
from typing import Any
@@ -60,9 +59,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
client = await hass.async_add_executor_job(
partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
)
client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
@@ -1,23 +1,16 @@
"""Conversation support for Anthropic."""
from collections.abc import AsyncGenerator, Callable
from collections.abc import Callable
import json
from typing import Any, Literal
from typing import Any, Literal, cast
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
Message,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
TextBlock,
TextBlockParam,
TextDelta,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
@@ -116,7 +109,7 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
type="tool_use",
id=tool_call.id,
name=tool_call.tool_name,
input=tool_call.tool_args,
input=json.dumps(tool_call.tool_args),
)
for tool_call in chat_content.tool_calls or ()
],
@@ -131,66 +124,6 @@ def _convert_content(chat_content: conversation.Content) -> MessageParam:
raise ValueError(f"Unexpected content type: {type(chat_content)}")
async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
A typical stream of responses might look something like the following:
- RawMessageStartEvent with no content
- RawContentBlockStartEvent with an empty TextBlock
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
- RawContentBlockDeltaEvent with a TextDelta
- ...
- RawContentBlockStopEvent
- RawContentBlockStartEvent with ToolUseBlock specifying the function name
- RawContentBlockDeltaEvent with a InputJSONDelta
- RawContentBlockDeltaEvent with a InputJSONDelta
- ...
- RawContentBlockStopEvent
- RawMessageDeltaEvent with a stop_reason='tool_use'
- RawMessageStopEvent(type='message_stop')
"""
if result is None:
raise TypeError("Expected a stream of messages")
current_tool_call: dict | None = None
async for response in result:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_tool_call = {
"id": response.content_block.id,
"name": response.content_block.name,
"input": "",
}
elif isinstance(response.content_block, TextBlock):
yield {"role": "assistant"}
elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta):
if current_tool_call is None:
raise ValueError("Unexpected delta without a tool call")
current_tool_call["input"] += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
LOGGER.debug("yielding delta: %s", response.delta.text)
yield {"content": response.delta.text}
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_call:
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call["id"],
tool_name=current_tool_call["name"],
tool_args=json.loads(current_tool_call["input"]),
)
]
}
current_tool_call = None
class AnthropicConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
):
@@ -273,30 +206,58 @@ class AnthropicConversationEntity(
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
stream = await client.messages.create(
response = await client.messages.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
messages=messages,
tools=tools or NOT_GIVEN,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
stream=True,
)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
messages.extend(
LOGGER.debug("Response %s", response)
messages.append(_message_convert(response))
text = "".join(
[
_convert_content(content)
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream)
)
content.text
for content in response.content
if isinstance(content, TextBlock)
]
)
tool_inputs = [
llm.ToolInput(
id=tool_call.id,
tool_name=tool_call.name,
tool_args=cast(dict[str, Any], tool_call.input),
)
for tool_call in response.content
if isinstance(tool_call, ToolUseBlock)
]
if not chat_log.unresponded_tool_results:
tool_results = [
ToolResultBlockParam(
type="tool_result",
tool_use_id=tool_response.tool_call_id,
content=json.dumps(tool_response.tool_result),
)
async for tool_response in chat_log.async_add_assistant_content(
conversation.AssistantContent(
agent_id=user_input.agent_id,
content=text,
tool_calls=tool_inputs or None,
)
)
]
if tool_results:
messages.append(MessageParam(role="user", content=tool_results))
if not tool_inputs:
break
response_content = chat_log.content[-1]
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.47.2"]
"requirements": ["anthropic==0.44.0"]
}
@@ -1 +0,0 @@
"""Virtual integration: Apollo Automation."""
@@ -1,6 +0,0 @@
{
"domain": "apollo_automation",
"name": "Apollo Automation",
"integration_type": "virtual",
"supported_by": "esphome"
}
@@ -233,6 +233,7 @@ class AppleTVManager(DeviceListener):
pass
except Exception:
_LOGGER.exception("Failed to connect")
await self.disconnect()
async def _connect_loop(self) -> None:
"""Connect loop background task function."""
+1 -11
@@ -19,20 +19,10 @@ class ApSystemsEntity(Entity):
data: ApSystemsData,
) -> None:
"""Initialize the APsystems entity."""
# Handle device version safely
sw_version = None
if data.coordinator.device_version:
version_parts = data.coordinator.device_version.split(" ")
if len(version_parts) > 1:
sw_version = version_parts[1]
else:
sw_version = version_parts[0]
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, data.device_id)},
manufacturer="APsystems",
model="EZ1-M",
serial_number=data.device_id,
sw_version=sw_version,
sw_version=data.coordinator.device_version.split(" ")[1],
)
+2 -12
@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from aranet4.client import Aranet4Advertisement, Color
from aranet4.client import Aranet4Advertisement
from bleak.backends.device import BLEDevice
from homeassistant.components.bluetooth.passive_update_processor import (
@@ -74,13 +74,6 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfPressure.HPA,
state_class=SensorStateClass.MEASUREMENT,
),
"status": AranetSensorEntityDescription(
key="threshold",
translation_key="threshold",
name="Threshold",
device_class=SensorDeviceClass.ENUM,
options=[status.name.lower() for status in Color],
),
"co2": AranetSensorEntityDescription(
key="co2",
name="Carbon Dioxide",
@@ -168,10 +161,7 @@ def sensor_update_to_bluetooth_data_update(
val = getattr(adv.readings, key)
if val == -1:
continue
if key == "status":
val = val.name.lower()
else:
val *= desc.scale
val *= desc.scale
data[tag] = val
names[tag] = desc.name
descs[tag] = desc
@@ -21,17 +21,5 @@
"no_devices_found": "No unconfigured Aranet devices found.",
"outdated_version": "This device is using outdated firmware. Please update it to at least v1.2.0 and try again."
}
},
"entity": {
"sensor": {
"threshold": {
"state": {
"error": "Error",
"green": "Green",
"yellow": "Yellow",
"red": "Red"
}
}
}
}
}
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
"iot_class": "local_polling",
"loggers": ["arcam"],
"requirements": ["arcam-fmj==1.8.1"],
"requirements": ["arcam-fmj==1.5.2"],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
+1 -6
@@ -92,12 +92,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
device_class=SensorDeviceClass.WIND_SPEED,
),
ArwnSensor(
topic + "/dir",
"Wind Direction",
"direction",
DEGREE,
"mdi:compass",
device_class=SensorDeviceClass.WIND_DIRECTION,
topic + "/dir", "Wind Direction", "direction", DEGREE, "mdi:compass"
),
]
return None
@@ -13,7 +13,7 @@ from pathlib import Path
from queue import Empty, Queue
from threading import Thread
import time
from typing import TYPE_CHECKING, Any, Literal, cast
from typing import Any, Literal, cast
import wave
import hass_nabucasa
@@ -30,7 +30,7 @@ from homeassistant.components import (
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.const import MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import chat_session, intent
@@ -81,9 +81,6 @@ from .error import (
)
from .vad import AudioBuffer, VoiceActivityTimeout, VoiceCommandSegmenter, chunk_samples
if TYPE_CHECKING:
from hassil.recognize import RecognizeResult
_LOGGER = logging.getLogger(__name__)
STORAGE_KEY = f"{DOMAIN}.pipelines"
@@ -126,12 +123,6 @@ STORED_PIPELINE_RUNS = 10
SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
"""Filter out intents that are not local fallback."""
return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
@callback
def _async_resolve_default_pipeline_settings(
hass: HomeAssistant,
@@ -1093,26 +1084,10 @@ class PipelineRun:
)
intent_response.async_set_speech(trigger_response_text)
intent_filter: Callable[[RecognizeResult], bool] | None = None
# If the LLM has API access, we filter out some sentences that are
# interfering with LLM operation.
if (
intent_agent_state := self.hass.states.get(self.intent_agent)
) and intent_agent_state.attributes.get(
ATTR_SUPPORTED_FEATURES, 0
) & conversation.ConversationEntityFeature.CONTROL:
intent_filter = _async_local_fallback_intent_filter
# Try local intents
if (
intent_response is None
and self.pipeline.prefer_local_intents
and (
intent_response := await conversation.async_handle_intents(
self.hass,
user_input,
intent_filter=intent_filter,
)
# Try local intents first, if preferred.
elif self.pipeline.prefer_local_intents and (
intent_response := await conversation.async_handle_intents(
self.hass, user_input
)
):
# Local intent matched
@@ -405,10 +405,7 @@ class AssistSatelliteEntity(entity.Entity):
def _internal_on_pipeline_event(self, event: PipelineEvent) -> None:
"""Set state based on pipeline stage."""
if event.type is PipelineEventType.WAKE_WORD_START:
# Only return to idle if we're not currently responding.
# The state will return to idle in tts_response_finished.
if self.state != AssistSatelliteState.RESPONDING:
self._set_state(AssistSatelliteState.IDLE)
self._set_state(AssistSatelliteState.IDLE)
elif event.type is PipelineEventType.STT_START:
self._set_state(AssistSatelliteState.LISTENING)
elif event.type is PipelineEventType.INTENT_START:
@@ -19,7 +19,6 @@ from .const import (
DOMAIN,
AssistSatelliteEntityFeature,
)
from .entity import AssistSatelliteConfiguration
CONNECTION_TEST_TIMEOUT = 30
@@ -92,16 +91,7 @@ def websocket_get_configuration(
)
return
try:
config_dict = asdict(satellite.async_get_configuration())
except NotImplementedError:
# Stub configuration
config_dict = asdict(
AssistSatelliteConfiguration(
available_wake_words=[], active_wake_words=[], max_active_wake_words=1
)
)
config_dict = asdict(satellite.async_get_configuration())
config_dict["pipeline_entity_id"] = satellite.pipeline_entity_id
config_dict["vad_entity_id"] = satellite.vad_sensitivity_entity_id
@@ -11,7 +11,7 @@
},
"error": {
"cannot_connect": "Unable to connect, please check serial port, address, electrical connection and that inverter is on (in daylight)",
"invalid_serial_port": "Serial port is not a valid device or could not be opened",
"invalid_serial_port": "Serial port is not a valid device or could not be openned",
"cannot_open_serial_port": "Cannot open serial port, please check and try again"
},
"abort": {
@@ -1,82 +0,0 @@
"""The Azure Storage integration."""
from aiohttp import ClientTimeout
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
AioHttpTransport,
) # need to import from private file, as it is not properly imported in the init
from azure.storage.blob.aio import ContainerClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import (
CONF_ACCOUNT_NAME,
CONF_CONTAINER_NAME,
CONF_STORAGE_ACCOUNT_KEY,
DATA_BACKUP_AGENT_LISTENERS,
DOMAIN,
)
type AzureStorageConfigEntry = ConfigEntry[ContainerClient]
async def async_setup_entry(
hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
"""Set up Azure Storage integration."""
# increase the aiohttp timeout for long-running operations (up/download)
session = async_create_clientsession(
hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
)
container_client = ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)
try:
if not await container_client.exists():
await container_client.create_container()
except ResourceNotFoundError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="account_not_found",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except ClientAuthenticationError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except HttpResponseError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
entry.runtime_data = container_client
def _async_notify_backup_listeners() -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()
entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))
return True
async def async_unload_entry(
hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
"""Unload an Azure Storage config entry."""
return True
@@ -1,182 +0,0 @@
"""Support for Azure Storage backup."""
from __future__ import annotations
from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import json
import logging
from typing import Any, Concatenate
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobProperties
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from . import AzureStorageConfigEntry
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
_LOGGER = logging.getLogger(__name__)
METADATA_VERSION = "1"
async def async_get_backup_agents(
hass: HomeAssistant,
) -> list[BackupAgent]:
"""Return a list of backup agents."""
entries: list[AzureStorageConfigEntry] = hass.config_entries.async_loaded_entries(
DOMAIN
)
return [AzureStorageBackupAgent(hass, entry) for entry in entries]
@callback
def async_register_backup_agents_listener(
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when agents are added or removed."""
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
@callback
def remove_listener() -> None:
"""Remove the listener."""
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
hass.data.pop(DATA_BACKUP_AGENT_LISTENERS)
return remove_listener
def handle_backup_errors[_R, **P](
func: Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]]:
"""Handle backup errors."""
@wraps(func)
async def wrapper(
self: AzureStorageBackupAgent, *args: P.args, **kwargs: P.kwargs
) -> _R:
try:
return await func(self, *args, **kwargs)
except HttpResponseError as err:
_LOGGER.debug(
"Error during backup in %s: Status %s, message %s",
func.__name__,
err.status_code,
err.message,
exc_info=True,
)
raise BackupAgentError(
f"Error during backup operation in {func.__name__}:"
f" Status {err.status_code}, message: {err.message}"
) from err
return wrapper
class AzureStorageBackupAgent(BackupAgent):
"""Azure storage backup agent."""
domain = DOMAIN
def __init__(self, hass: HomeAssistant, entry: AzureStorageConfigEntry) -> None:
"""Initialize the Azure storage backup agent."""
super().__init__()
self._client = entry.runtime_data
self.name = entry.title
self.unique_id = entry.entry_id
@handle_backup_errors
async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file."""
blob = await self._find_blob_by_backup_id(backup_id)
if blob is None:
raise BackupNotFound(f"Backup {backup_id} not found")
download_stream = await self._client.download_blob(blob.name)
return download_stream.chunks()
@handle_backup_errors
async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup."""
metadata = {
"metadata_version": METADATA_VERSION,
"backup_id": backup.backup_id,
"backup_metadata": json.dumps(backup.as_dict()),
}
await self._client.upload_blob(
name=suggested_filename(backup),
metadata=metadata,
data=await open_stream(),
length=backup.size,
)
@handle_backup_errors
async def async_delete_backup(
self,
backup_id: str,
**kwargs: Any,
) -> None:
"""Delete a backup file."""
blob = await self._find_blob_by_backup_id(backup_id)
if blob is None:
return
await self._client.delete_blob(blob.name)
@handle_backup_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups: list[AgentBackup] = []
async for blob in self._client.list_blobs(include="metadata"):
metadata = blob.metadata
if metadata.get("metadata_version") == METADATA_VERSION:
backups.append(
AgentBackup.from_dict(json.loads(metadata["backup_metadata"]))
)
return backups
@handle_backup_errors
async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
blob = await self._find_blob_by_backup_id(backup_id)
if blob is None:
return None
return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))
async def _find_blob_by_backup_id(self, backup_id: str) -> BlobProperties | None:
"""Find a blob by backup id."""
async for blob in self._client.list_blobs(include="metadata"):
if (
backup_id == blob.metadata.get("backup_id", "")
and blob.metadata.get("metadata_version") == METADATA_VERSION
):
return blob
return None
@@ -1,72 +0,0 @@
"""Config flow for Azure Storage integration."""
import logging
from typing import Any
from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
from azure.core.pipeline.transport._aiohttp import (
AioHttpTransport,
) # need to import from private file, as it is not properly imported in the init
from azure.storage.blob.aio import ContainerClient
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_ACCOUNT_NAME,
CONF_CONTAINER_NAME,
CONF_STORAGE_ACCOUNT_KEY,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for azure storage."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""User step for Azure Storage."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match(
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
)
container_client = ContainerClient(
account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=user_input[CONF_CONTAINER_NAME],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
try:
await container_client.exists()
except ResourceNotFoundError:
errors["base"] = "cannot_connect"
except ClientAuthenticationError:
errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
except Exception:
_LOGGER.exception("Unknown exception occurred")
errors["base"] = "unknown"
if not errors:
return self.async_create_entry(
title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
data=user_input,
)
return self.async_show_form(
data_schema=vol.Schema(
{
vol.Required(CONF_ACCOUNT_NAME): str,
vol.Required(
CONF_CONTAINER_NAME, default="home-assistant-backups"
): str,
vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
}
),
errors=errors,
)
@@ -1,16 +0,0 @@
"""Constants for the Azure Storage integration."""
from collections.abc import Callable
from typing import Final
from homeassistant.util.hass_dict import HassKey
DOMAIN: Final = "azure_storage"
CONF_STORAGE_ACCOUNT_KEY: Final = "storage_account_key"
CONF_ACCOUNT_NAME: Final = "account_name"
CONF_CONTAINER_NAME: Final = "container_name"
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
f"{DOMAIN}.backup_agent_listeners"
)
@@ -1,12 +0,0 @@
{
"domain": "azure_storage",
"name": "Azure Storage",
"codeowners": ["@zweckj"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/azure_storage",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["azure-storage-blob"],
"quality_scale": "bronze",
"requirements": ["azure-storage-blob==12.24.0"]
}
@@ -1,133 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling:
status: exempt
comment: |
This integration does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not have any custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id:
status: exempt
comment: |
This integration does not have entities.
has-entity-name:
status: exempt
comment: |
This integration does not have entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have any configuration parameters.
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: |
This integration does not have entities.
integration-owner: done
log-when-unavailable:
status: exempt
comment: |
This integration does not have entities.
parallel-updates:
status: exempt
comment: |
This integration does not have platforms.
reauthentication-flow: todo
test-coverage: done
# Gold
devices:
status: exempt
comment: |
This integration connects to a single service.
diagnostics:
status: exempt
comment: |
There is no data to diagnose.
discovery-update-info:
status: exempt
comment: |
This integration is a cloud service and does not support discovery.
discovery:
status: exempt
comment: |
This integration is a cloud service and does not support discovery.
docs-data-update:
status: exempt
comment: |
This integration does not poll or push.
docs-examples:
status: exempt
comment: |
This integration only serves backup.
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: |
This integration is a cloud service.
docs-supported-functions:
status: exempt
comment: |
This integration does not have entities.
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
This integration connects to a single service.
entity-category:
status: exempt
comment: |
This integration does not have entities.
entity-device-class:
status: exempt
comment: |
This integration does not have entities.
entity-disabled-by-default:
status: exempt
comment: |
This integration does not have entities.
entity-translations:
status: exempt
comment: |
This integration does not have entities.
exception-translations: done
icon-translations:
status: exempt
comment: |
This integration does not have entities.
reconfiguration-flow: todo
repair-issues: done
stale-devices:
status: exempt
comment: |
This integration connects to a single service.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done
@@ -1,48 +0,0 @@
{
"config": {
"error": {
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"storage_account_key": "Storage account key",
"account_name": "Account name",
"container_name": "Container name"
},
"data_description": {
"storage_account_key": "Storage account access key used for authorization",
"account_name": "Name of the storage account",
"container_name": "Name of the storage container to be used (will be created if it does not exist)"
},
"description": "Set up an Azure (Blob) storage account to be used for backups.",
"title": "Add Azure storage account"
}
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
}
},
"issues": {
"container_not_found": {
"title": "Storage container not found",
"description": "The storage container {container_name} has not been found in the storage account. Please re-create it manually, then fix this issue."
}
},
"exceptions": {
"account_not_found": {
"message": "Storage account {account_name} not found"
},
"cannot_connect": {
"message": "Can not connect to storage account {account_name}"
},
"invalid_auth": {
"message": "Authentication failed for storage account {account_name}"
},
"container_not_found": {
"message": "Storage container {container_name} not found"
}
}
}
+16 -14
View File
@@ -1,8 +1,8 @@
"""The Backup integration."""
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
@@ -16,7 +16,6 @@ from .agent import (
BackupAgentPlatformProtocol,
LocalBackupAgent,
)
from .config import BackupConfig, CreateBackupParametersDict
from .const import DATA_MANAGER, DOMAIN
from .http import async_register_http_views
from .manager import (
@@ -32,7 +31,6 @@ from .manager import (
IdleEvent,
IncorrectPasswordError,
ManagerBackup,
ManagerStateEvent,
NewBackup,
RestoreBackupEvent,
RestoreBackupStage,
@@ -49,14 +47,12 @@ __all__ = [
"BackupAgent",
"BackupAgentError",
"BackupAgentPlatformProtocol",
"BackupConfig",
"BackupManagerError",
"BackupNotFound",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
"CreateBackupEvent",
"CreateBackupParametersDict",
"CreateBackupStage",
"CreateBackupState",
"Folder",
@@ -64,12 +60,12 @@ __all__ = [
"IncorrectPasswordError",
"LocalBackupAgent",
"ManagerBackup",
"ManagerStateEvent",
"NewBackup",
"RestoreBackupEvent",
"RestoreBackupStage",
"RestoreBackupState",
"WrittenBackup",
"async_get_manager",
"suggested_filename",
"suggested_filename_from_name_date",
]
@@ -92,13 +88,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
backup_manager = BackupManager(hass, reader_writer)
hass.data[DATA_MANAGER] = backup_manager
try:
await backup_manager.async_setup()
except Exception as err:
hass.data[DATA_BACKUP].manager_ready.set_exception(err)
raise
else:
hass.data[DATA_BACKUP].manager_ready.set_result(None)
await backup_manager.async_setup()
async_register_websocket_handlers(hass, with_hassio)
@@ -129,3 +119,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_register_http_views(hass)
return True
@callback
def async_get_manager(hass: HomeAssistant) -> BackupManager:
"""Get the backup manager instance.
Raises HomeAssistantError if the backup integration is not available.
"""
if DATA_MANAGER not in hass.data:
raise HomeAssistantError("Backup integration is not available")
return hass.data[DATA_MANAGER]
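For orientation, a minimal sketch of how another integration might call the `async_get_manager` helper shown in the hunk above; it assumes the backup integration is loaded, and the calling function name is hypothetical.

```python
from homeassistant.components.backup import async_get_manager
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError


async def _peek_backup_state(hass: HomeAssistant) -> None:
    """Hypothetical helper: inspect the backup manager state if available."""
    try:
        manager = async_get_manager(hass)
    except HomeAssistantError:
        # Backup integration is not set up; nothing to inspect.
        return
    # last_event holds the most recent ManagerStateEvent (idle, create, restore, ...).
    _ = manager.last_event
```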
@@ -1,38 +0,0 @@
"""Websocket commands for the Backup integration."""
from typing import Any
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events
from .const import DATA_MANAGER
from .manager import ManagerStateEvent
@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
"""Register websocket commands."""
websocket_api.async_register_command(hass, handle_subscribe_events)
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""
def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))
if DATA_MANAGER in hass.data:
manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
connection.send_result(msg["id"])
+2 -60
View File
@@ -12,19 +12,16 @@ from typing import TYPE_CHECKING, Self, TypedDict
from cronsim import CronSim
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util
from .const import DOMAIN, LOGGER
from .const import LOGGER
from .models import BackupManagerError, Folder
if TYPE_CHECKING:
from .manager import BackupManager, ManagerBackup
AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID = "automatic_backup_agents_unavailable"
CRON_PATTERN_DAILY = "{m} {h} * * *"
CRON_PATTERN_WEEKLY = "{m} {h} * * {d}"
@@ -42,7 +39,6 @@ class StoredBackupConfig(TypedDict):
"""Represent the stored backup config."""
agents: dict[str, StoredAgentConfig]
automatic_backups_configured: bool
create_backup: StoredCreateBackupConfig
last_attempted_automatic_backup: str | None
last_completed_automatic_backup: str | None
@@ -55,7 +51,6 @@ class BackupConfigData:
"""Represent loaded backup config data."""
agents: dict[str, AgentConfig]
automatic_backups_configured: bool # only used by frontend
create_backup: CreateBackupConfig
last_attempted_automatic_backup: datetime | None = None
last_completed_automatic_backup: datetime | None = None
@@ -93,7 +88,6 @@ class BackupConfigData:
agent_id: AgentConfig(protected=agent_data["protected"])
for agent_id, agent_data in data["agents"].items()
},
automatic_backups_configured=data["automatic_backups_configured"],
create_backup=CreateBackupConfig(
agent_ids=data["create_backup"]["agent_ids"],
include_addons=data["create_backup"]["include_addons"],
@@ -133,7 +127,6 @@ class BackupConfigData:
agents={
agent_id: agent.to_dict() for agent_id, agent in self.agents.items()
},
automatic_backups_configured=self.automatic_backups_configured,
create_backup=self.create_backup.to_dict(),
last_attempted_automatic_backup=last_attempted,
last_completed_automatic_backup=last_completed,
@@ -149,12 +142,10 @@ class BackupConfig:
"""Initialize backup config."""
self.data = BackupConfigData(
agents={},
automatic_backups_configured=False,
create_backup=CreateBackupConfig(),
retention=RetentionConfig(),
schedule=BackupSchedule(),
)
self._hass = hass
self._manager = manager
def load(self, stored_config: StoredBackupConfig) -> None:
@@ -163,12 +154,10 @@ class BackupConfig:
self.data.retention.apply(self._manager)
self.data.schedule.apply(self._manager)
@callback
def update(
async def update(
self,
*,
agents: dict[str, AgentParametersDict] | UndefinedType = UNDEFINED,
automatic_backups_configured: bool | UndefinedType = UNDEFINED,
create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
retention: RetentionParametersDict | UndefinedType = UNDEFINED,
schedule: ScheduleParametersDict | UndefinedType = UNDEFINED,
@@ -182,12 +171,8 @@ class BackupConfig:
self.data.agents[agent_id] = replace(
self.data.agents[agent_id], **agent_config
)
if automatic_backups_configured is not UNDEFINED:
self.data.automatic_backups_configured = automatic_backups_configured
if create_backup is not UNDEFINED:
self.data.create_backup = replace(self.data.create_backup, **create_backup)
if "agent_ids" in create_backup:
check_unavailable_agents(self._hass, self._manager)
if retention is not UNDEFINED:
new_retention = RetentionConfig(**retention)
if new_retention != self.data.retention:
@@ -568,46 +553,3 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
await manager.async_delete_filtered_backups(
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
)
@callback
def check_unavailable_agents(hass: HomeAssistant, manager: BackupManager) -> None:
"""Check for unavailable agents."""
if missing_agent_ids := set(manager.config.data.create_backup.agent_ids) - set(
manager.backup_agents
):
LOGGER.debug(
"Agents %s are configured for automatic backup but are unavailable",
missing_agent_ids,
)
# Remove issues for unavailable agents that are not unavailable anymore.
issue_registry = ir.async_get(hass)
existing_missing_agent_issue_ids = {
issue_id
for domain, issue_id in issue_registry.issues
if domain == DOMAIN
and issue_id.startswith(AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID)
}
current_missing_agent_issue_ids = {
f"{AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID}_{agent_id}": agent_id
for agent_id in missing_agent_ids
}
for issue_id in existing_missing_agent_issue_ids - set(
current_missing_agent_issue_ids
):
ir.async_delete_issue(hass, DOMAIN, issue_id)
for issue_id, agent_id in current_missing_agent_issue_ids.items():
ir.async_create_issue(
hass,
DOMAIN,
issue_id,
is_fixable=False,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key="automatic_backup_agents_unavailable",
translation_placeholders={
"agent_id": agent_id,
"backup_settings": "/config/backup/settings",
},
)
+22 -119
View File
@@ -14,7 +14,6 @@ from itertools import chain
import json
from pathlib import Path, PurePath
import shutil
import sys
import tarfile
import time
from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
@@ -33,9 +32,7 @@ from homeassistant.helpers import (
instance_id,
integration_platform,
issue_registry as ir,
start,
)
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
@@ -46,12 +43,7 @@ from .agent import (
BackupAgentPlatformProtocol,
LocalBackupAgent,
)
from .config import (
BackupConfig,
CreateBackupParametersDict,
check_unavailable_agents,
delete_backups_exceeding_configured_count,
)
from .config import BackupConfig, delete_backups_exceeding_configured_count
from .const import (
BUF_SIZE,
DATA_MANAGER,
@@ -118,7 +110,6 @@ class BackupManagerState(StrEnum):
IDLE = "idle"
CREATE_BACKUP = "create_backup"
BLOCKED = "blocked"
RECEIVE_BACKUP = "receive_backup"
RESTORE_BACKUP = "restore_backup"
@@ -227,13 +218,6 @@ class RestoreBackupEvent(ManagerStateEvent):
state: RestoreBackupState
@dataclass(frozen=True, kw_only=True, slots=True)
class BlockedEvent(ManagerStateEvent):
"""Backup manager blocked, Home Assistant is starting."""
manager_state: BackupManagerState = BackupManagerState.BLOCKED
class BackupPlatformProtocol(Protocol):
"""Define the format that backup platforms can have."""
@@ -298,10 +282,6 @@ class BackupReaderWriter(abc.ABC):
) -> None:
"""Get restore events after core restart."""
@abc.abstractmethod
async def async_validate_config(self, *, config: BackupConfig) -> None:
"""Validate backup config."""
class IncorrectPasswordError(BackupReaderWriterError):
"""Raised when the password is incorrect."""
@@ -317,12 +297,6 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
_message = "On-the-fly decryption is not supported for this backup."
class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
"""Raised when multiple exceptions occur."""
error_code = "multiple_errors"
class BackupManager:
"""Define the format that backup managers can have."""
@@ -348,11 +322,9 @@ class BackupManager:
self.remove_next_delete_event: Callable[[], None] | None = None
# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_event: ManagerStateEvent = IdleEvent()
self.last_non_idle_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
DATA_BACKUP
].backup_event_subscriptions
self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
async def async_setup(self) -> None:
"""Set up the backup manager."""
@@ -361,20 +333,10 @@ class BackupManager:
self.config.load(stored["config"])
self.known_backups.load(stored["backups"])
await self._reader_writer.async_validate_config(config=self.config)
await self._reader_writer.async_resume_restore_progress_after_restart(
on_progress=self.async_on_backup_event
)
async def set_manager_idle_after_start(hass: HomeAssistant) -> None:
"""Set manager to idle after start."""
self.async_on_backup_event(IdleEvent())
if self.state == BackupManagerState.BLOCKED:
# If we're not finishing a restore job, set the manager to idle after start
start.async_at_started(self.hass, set_manager_idle_after_start)
await self.load_platforms()
@property
@@ -443,13 +405,6 @@ class BackupManager:
}
)
@callback
def check_unavailable_agents_after_start(hass: HomeAssistant) -> None:
"""Check unavailable agents after start."""
check_unavailable_agents(hass, self)
start.async_at_started(self.hass, check_unavailable_agents_after_start)
async def _add_platform(
self,
hass: HomeAssistant,
@@ -1310,11 +1265,24 @@ class BackupManager:
if (current_state := self.state) != (new_state := event.manager_state):
LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
self.last_event = event
if not isinstance(event, (BlockedEvent, IdleEvent)):
if not isinstance(event, IdleEvent):
self.last_non_idle_event = event
for subscription in self._backup_event_subscriptions:
subscription(event)
@callback
def async_subscribe_events(
self,
on_event: Callable[[ManagerStateEvent], None],
) -> Callable[[], None]:
"""Subscribe events."""
def remove_subscription() -> None:
self._backup_event_subscriptions.remove(on_event)
self._backup_event_subscriptions.append(on_event)
return remove_subscription
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
ir.async_create_issue(
@@ -1629,24 +1597,10 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)
finally:
# Inform integrations the backup is done
# If there's an unhandled exception, we keep it so we can rethrow it in case
# the post backup actions also fail.
unhandled_exc = sys.exception()
try:
try:
await manager.async_post_backup_actions()
except BackupManagerError as err:
raise BackupReaderWriterError(str(err)) from err
except Exception as err:
if not unhandled_exc:
raise
# If there's an unhandled exception, we wrap both that and the exception
# from the post backup actions in an ExceptionGroup so the caller is
# aware of both exceptions.
raise BackupManagerExceptionGroup(
f"Multiple errors when creating backup: {unhandled_exc}, {err}",
[unhandled_exc, err],
) from None
await manager.async_post_backup_actions()
except BackupManagerError as err:
raise BackupReaderWriterError(str(err)) from err
def _mkdir_and_generate_backup_contents(
self,
@@ -1658,13 +1612,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""Generate backup contents and return the size."""
if not tar_file_path:
tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
try:
make_backup_dir(tar_file_path.parent)
except OSError as err:
raise BackupReaderWriterError(
f"Failed to create dir {tar_file_path.parent}: "
f"{err} ({err.__class__.__name__})"
) from err
make_backup_dir(tar_file_path.parent)
excludes = EXCLUDE_FROM_BACKUP
if not database_included:
@@ -1702,14 +1650,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
file_filter=is_excluded_by_filter,
arcname="data",
)
try:
stat_result = tar_file_path.stat()
except OSError as err:
raise BackupReaderWriterError(
f"Error getting size of {tar_file_path}: "
f"{err} ({err.__class__.__name__})"
) from err
return (tar_file_path, stat_result.st_size)
return (tar_file_path, tar_file_path.stat().st_size)
async def async_receive_backup(
self,
@@ -1891,44 +1832,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)
on_progress(IdleEvent())
async def async_validate_config(self, *, config: BackupConfig) -> None:
"""Validate backup config.
Update automatic backup settings to not include addons or folders and remove
hassio agents in case a backup created by supervisor was restored.
"""
create_backup = config.data.create_backup
if (
not create_backup.include_addons
and not create_backup.include_all_addons
and not create_backup.include_folders
and not any(a_id.startswith("hassio.") for a_id in create_backup.agent_ids)
):
LOGGER.debug("Backup settings don't need to be adjusted")
return
LOGGER.info(
"Adjusting backup settings to not include addons, folders or supervisor locations"
)
automatic_agents = [
agent_id
for agent_id in create_backup.agent_ids
if not agent_id.startswith("hassio.")
]
if (
self._local_agent_id not in automatic_agents
and "hassio.local" in create_backup.agent_ids
):
automatic_agents = [self._local_agent_id, *automatic_agents]
config.update(
create_backup=CreateBackupParametersDict(
agent_ids=automatic_agents,
include_addons=None,
include_all_addons=False,
include_folders=None,
)
)
def _generate_backup_id(date: str, name: str) -> str:
"""Generate a backup ID."""
@@ -8,5 +8,5 @@
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.6", "securetar==2025.2.1"]
"requirements": ["cronsim==2.6", "securetar==2025.1.4"]
}
+1 -13
View File
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 5
STORAGE_VERSION_MINOR = 3
class StoredBackupData(TypedDict):
@@ -60,18 +60,6 @@ class _BackupStore(Store[StoredBackupData]):
else:
data["config"]["schedule"]["days"] = [state]
data["config"]["schedule"]["recurrence"] = "custom_days"
if old_minor_version < 4:
# Workaround for a bug in frontend which incorrectly set days to 0
# instead of to None for unlimited retention.
if data["config"]["retention"]["copies"] == 0:
data["config"]["retention"]["copies"] = None
if data["config"]["retention"]["days"] == 0:
data["config"]["retention"]["days"] = None
if old_minor_version < 5:
# Version 1.5 adds automatic_backups_configured
data["config"]["automatic_backups_configured"] = (
data["config"]["create_backup"]["password"] is not None
)
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
@@ -1,9 +1,5 @@
{
"issues": {
"automatic_backup_agents_unavailable": {
"title": "The backup location {agent_id} is unavailable",
"description": "The backup location `{agent_id}` is unavailable but is still configured for automatic backups.\n\nPlease visit the [automatic backup configuration page]({backup_settings}) to review and update your backup locations. Backups will not be uploaded to selected locations that are unavailable."
},
"automatic_backup_failed_create": {
"title": "Automatic backup could not be created",
"description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
+2 -5
View File
@@ -104,15 +104,12 @@ def read_backup(backup_path: Path) -> AgentBackup:
bool, homeassistant.get("exclude_database", False)
)
extra_metadata = cast(dict[str, bool | str], data.get("extra", {}))
date = extra_metadata.get("supervisor.backup_request_date", data["date"])
return AgentBackup(
addons=addons,
backup_id=cast(str, data["slug"]),
database_included=database_included,
date=cast(str, date),
extra_metadata=extra_metadata,
date=cast(str, data["date"]),
extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
folders=folders,
homeassistant_included=homeassistant_included,
homeassistant_version=homeassistant_version,
+30 -9
View File
@@ -10,7 +10,11 @@ from homeassistant.helpers import config_validation as cv
from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
from .manager import (
DecryptOnDowloadNotSupported,
IncorrectPasswordError,
ManagerStateEvent,
)
from .models import BackupNotFound, Folder
@@ -30,6 +34,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
websocket_api.async_register_command(hass, handle_delete)
websocket_api.async_register_command(hass, handle_restore)
websocket_api.async_register_command(hass, handle_subscribe_events)
websocket_api.async_register_command(hass, handle_config_info)
websocket_api.async_register_command(hass, handle_config_update)
@@ -341,13 +346,11 @@ async def handle_config_info(
)
@callback
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required("type"): "backup/config/update",
vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
vol.Optional("automatic_backups_configured"): bool,
vol.Optional("create_backup"): vol.Schema(
{
vol.Optional("agent_ids"): vol.All([str], vol.Unique()),
@@ -365,10 +368,8 @@ async def handle_config_info(
),
vol.Optional("retention"): vol.Schema(
{
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
vol.Optional("copies"): vol.Any(int, None),
vol.Optional("days"): vol.Any(int, None),
},
),
vol.Optional("schedule"): vol.Schema(
@@ -384,7 +385,8 @@ async def handle_config_info(
),
}
)
def handle_config_update(
@websocket_api.async_response
async def handle_config_update(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
@@ -394,5 +396,24 @@ def handle_config_update(
changes = dict(msg)
changes.pop("id")
changes.pop("type")
manager.config.update(**changes)
await manager.config.update(**changes)
connection.send_result(msg["id"])
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""
def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))
manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
connection.send_result(msg["id"])
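A minimal sketch of the websocket exchange implied by the `backup/subscribe_events` handler above; the message id and the event payload shape are illustrative assumptions, not taken from the diff.

```python
# Hypothetical client-side subscription over the Home Assistant websocket API.
# Only the command type comes from the handler above; the rest is illustrative.
subscribe_msg = {
    "id": 1,
    "type": "backup/subscribe_events",
}
# The handler first replays manager.last_event, then forwards every subsequent
# ManagerStateEvent as an event message bound to the same id, roughly:
# {"id": 1, "type": "event", "event": {"manager_state": "idle"}}
```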
@@ -24,8 +24,6 @@ PLATFORMS = [
Platform.FAN,
Platform.LIGHT,
Platform.SELECT,
Platform.SWITCH,
Platform.TIME,
]
@@ -14,5 +14,5 @@
"documentation": "https://www.home-assistant.io/integrations/balboa",
"iot_class": "local_push",
"loggers": ["pybalboa"],
"requirements": ["pybalboa==1.1.3"]
"requirements": ["pybalboa==1.1.2"]
}
@@ -78,19 +78,6 @@
"high": "High"
}
}
},
"switch": {
"filter_cycle_2_enabled": {
"name": "Filter cycle 2 enabled"
}
},
"time": {
"filter_cycle_start": {
"name": "Filter cycle {index} start"
},
"filter_cycle_end": {
"name": "Filter cycle {index} end"
}
}
}
}
-48
View File
@@ -1,48 +0,0 @@
"""Support for Balboa switches."""
from __future__ import annotations
from typing import Any
from pybalboa import SpaClient
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BalboaConfigEntry
from .entity import BalboaEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: BalboaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the spa's switches."""
spa = entry.runtime_data
async_add_entities([BalboaSwitchEntity(spa)])
class BalboaSwitchEntity(BalboaEntity, SwitchEntity):
"""Representation of a Balboa switch entity."""
def __init__(self, spa: SpaClient) -> None:
"""Initialize a Balboa switch entity."""
super().__init__(spa, "filter_cycle_2_enabled")
self._attr_entity_category = EntityCategory.CONFIG
self._attr_translation_key = "filter_cycle_2_enabled"
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
return self._client.filter_cycle_2_enabled
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self._client.configure_filter_cycle(2, enabled=True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
await self._client.configure_filter_cycle(2, enabled=False)
-56
View File
@@ -1,56 +0,0 @@
"""Support for Balboa times."""
from __future__ import annotations
from datetime import time
import itertools
from typing import Any
from pybalboa import SpaClient
from homeassistant.components.time import TimeEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BalboaConfigEntry
from .entity import BalboaEntity
FILTER_CYCLE = "filter_cycle_"
async def async_setup_entry(
hass: HomeAssistant,
entry: BalboaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the spa's times."""
spa = entry.runtime_data
async_add_entities(
BalboaTimeEntity(spa, index, period)
for index, period in itertools.product((1, 2), ("start", "end"))
)
class BalboaTimeEntity(BalboaEntity, TimeEntity):
"""Representation of a Balboa time entity."""
entity_category = EntityCategory.CONFIG
def __init__(self, spa: SpaClient, index: int, period: str) -> None:
"""Initialize a Balboa time entity."""
super().__init__(spa, f"{FILTER_CYCLE}{index}_{period}")
self.index = index
self.period = period
self._attr_translation_key = f"{FILTER_CYCLE}{period}"
self._attr_translation_placeholders = {"index": str(index)}
@property
def native_value(self) -> time | None:
"""Return the value reported by the time."""
return getattr(self._client, f"{FILTER_CYCLE}{self.index}_{self.period}")
async def async_set_value(self, value: time) -> None:
"""Change the time."""
args: dict[str, Any] = {self.period: value}
await self._client.configure_filter_cycle(self.index, **args)
@@ -4,13 +4,12 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Any
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import BangOlufsenConfigEntry
from .const import DEVICE_BUTTONS, DOMAIN
from .const import DOMAIN
async def async_get_config_entry_diagnostics(
@@ -26,9 +25,8 @@ async def async_get_config_entry_diagnostics(
if TYPE_CHECKING:
assert config_entry.unique_id
entity_registry = er.async_get(hass)
# Add media_player entity's state
entity_registry = er.async_get(hass)
if entity_id := entity_registry.async_get_entity_id(
MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id
):
@@ -39,16 +37,4 @@ async def async_get_config_entry_diagnostics(
state_dict.pop("context")
data["media_player"] = state_dict
# Add button Event entity states (if enabled)
for device_button in DEVICE_BUTTONS:
if entity_id := entity_registry.async_get_entity_id(
EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
):
if state := hass.states.get(entity_id):
state_dict = dict(state.as_dict())
# Remove context as it is not relevant
state_dict.pop("context")
data[f"{device_button}_event"] = state_dict
return data
@@ -5,14 +5,14 @@
"title": "Manual YAML fix required for Bayesian"
},
"no_prob_given_false": {
"description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yaml` for `bayesian/{entity}`. These observations will be ignored until you do.",
"description": "In the Bayesian integration `prob_given_false` is now a required configuration variable as there was no mathematical rationale for the previous default value. Please add this to your `configuration.yml` for `bayesian/{entity}`. These observations will be ignored until you do.",
"title": "Manual YAML addition required for Bayesian"
}
},
"services": {
"reload": {
"name": "[%key:common::action::reload%]",
"description": "Reloads Bayesian sensors from the YAML-configuration."
"description": "Reloads bayesian sensors from the YAML-configuration."
}
}
}
@@ -28,7 +28,7 @@
"name": "Activity",
"state": {
"available": "Available",
"charging": "[%key:common::state::charging%]",
"charging": "Charging",
"unavailable": "Unavailable",
"error": "Error",
"offline": "Offline"
@@ -75,9 +75,6 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
"""Handle a flow initialized by zeroconf discovery."""
# the player can have an ipv6 address, but the api is only available on ipv4
if discovery_info.ip_address.version != 4:
return self.async_abort(reason="no_ipv4_address")
if discovery_info.port is not None:
self._port = discovery_info.port
@@ -19,8 +19,7 @@
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"no_ipv4_address": "No IPv4 address found."
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
@@ -16,11 +16,11 @@
"quality_scale": "internal",
"requirements": [
"bleak==0.22.3",
"bleak-retry-connector==3.9.0",
"bleak-retry-connector==3.8.1",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.4",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.23.4",
"dbus-fast==2.33.0",
"habluetooth==3.24.1"
"habluetooth==3.21.1"
]
}
@@ -138,7 +138,7 @@
"name": "Charging status",
"state": {
"default": "Default",
"charging": "[%key:common::state::charging%]",
"charging": "Charging",
"error": "Error",
"complete": "Complete",
"fully_charged": "Fully charged",
-7
View File
@@ -91,13 +91,6 @@ BUTTONS: tuple[BondButtonEntityDescription, ...] = (
mutually_exclusive=Action.SET_BRIGHTNESS,
argument=None,
),
BondButtonEntityDescription(
key=Action.TOGGLE_LIGHT_TEMP,
name="Toggle Light Temperature",
translation_key="toggle_light_temp",
mutually_exclusive=None, # No mutually exclusive action
argument=None,
),
BondButtonEntityDescription(
key=Action.START_UP_LIGHT_DIMMER,
name="Start Up Light Dimmer",
+7 -7
View File
@@ -31,7 +31,7 @@
"services": {
"set_fan_speed_tracked_state": {
"name": "Set fan speed tracked state",
"description": "Sets the tracked fan speed for a Bond fan.",
"description": "Sets the tracked fan speed for a bond fan.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -45,7 +45,7 @@
},
"set_switch_power_tracked_state": {
"name": "Set switch power tracked state",
"description": "Sets the tracked power state of a Bond switch.",
"description": "Sets the tracked power state of a bond switch.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -59,7 +59,7 @@
},
"set_light_power_tracked_state": {
"name": "Set light power tracked state",
"description": "Sets the tracked power state of a Bond light.",
"description": "Sets the tracked power state of a bond light.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -73,7 +73,7 @@
},
"set_light_brightness_tracked_state": {
"name": "Set light brightness tracked state",
"description": "Sets the tracked brightness state of a Bond light.",
"description": "Sets the tracked brightness state of a bond light.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -87,15 +87,15 @@
},
"start_increasing_brightness": {
"name": "Start increasing brightness",
"description": "Starts increasing the brightness of the light (deprecated)."
"description": "Start increasing the brightness of the light. (deprecated)."
},
"start_decreasing_brightness": {
"name": "Start decreasing brightness",
"description": "Starts decreasing the brightness of the light (deprecated)."
"description": "Start decreasing the brightness of the light. (deprecated)."
},
"stop": {
"name": "[%key:common::action::stop%]",
"description": "Stops any in-progress action and empty the queue (deprecated)."
"description": "Stop any in-progress action and empty the queue. (deprecated)."
}
}
}
+3 -6
View File
@@ -77,12 +77,9 @@ class BringEventEntity(BringBaseEntity, EventEntity):
attributes = asdict(activity.content)
attributes["last_activity_by"] = next(
(
x.name
for x in bring_list.users.users
if x.publicUuid == activity.content.publicUserUuid
),
None,
x.name
for x in bring_list.users.users
if x.publicUuid == activity.content.publicUserUuid
)
self._trigger_event(
@@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"quality_scale": "platinum",
"requirements": ["bring-api==1.0.2"]
}
@@ -10,9 +10,9 @@ rules:
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: The integration registers no events
@@ -26,10 +26,8 @@ rules:
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: Integration has no configuration parameters
docs-installation-parameters: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable:
@@ -48,15 +46,13 @@ rules:
discovery:
status: exempt
comment: Integration is a service and has no devices.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: Integration is a service and has no devices.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: done
entity-category: done
entity-device-class: done
+3 -3
View File
@@ -17,13 +17,13 @@ class BroadlinkEntity(Entity):
self._device = device
self._coordinator = device.update_manager.coordinator
async def async_added_to_hass(self) -> None:
async def async_added_to_hass(self):
"""Call when the entity is added to hass."""
self.async_on_remove(self._coordinator.async_add_listener(self._recv_data))
if self._coordinator.data:
self._update_state(self._coordinator.data)
async def async_update(self) -> None:
async def async_update(self):
"""Update the state of the entity."""
await self._coordinator.async_request_refresh()
@@ -49,7 +49,7 @@ class BroadlinkEntity(Entity):
"""
@property
def available(self) -> bool:
def available(self):
"""Return True if the entity is available."""
return self._device.available
+1 -1
View File
@@ -30,7 +30,7 @@
"message": "Can't set preset mode to {preset_mode} when HVAC mode is not set to auto"
},
"set_data_error": {
"message": "An error occurred while sending the data to the BSB-Lan device"
"message": "An error occurred while sending the data to the BSBLAN device"
},
"set_temperature_error": {
"message": "An error occurred while setting the temperature"
@@ -169,7 +169,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
translation_key="windazimuth",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="pressure",
@@ -531,35 +530,30 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
translation_key="windazimuth_1d",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="windazimuth_2d",
translation_key="windazimuth_2d",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="windazimuth_3d",
translation_key="windazimuth_3d",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="windazimuth_4d",
translation_key="windazimuth_4d",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="windazimuth_5d",
translation_key="windazimuth_5d",
native_unit_of_measurement=DEGREE,
icon="mdi:compass-outline",
device_class=SensorDeviceClass.WIND_DIRECTION,
),
SensorEntityDescription(
key="condition_1d",
@@ -1 +0,0 @@
"""Virtual integration: Burbank Water and Power (BWP)."""
@@ -1,6 +0,0 @@
{
"domain": "burbank_water_and_power",
"name": "Burbank Water and Power (BWP)",
"integration_type": "virtual",
"supported_by": "opower"
}
@@ -153,27 +153,6 @@ def _has_min_duration(
return validate
def _has_positive_interval(
start_key: str, end_key: str, duration_key: str
) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the time span between start and end is greater than zero."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
if (duration := obj.get(duration_key)) is not None:
if duration <= datetime.timedelta(seconds=0):
raise vol.Invalid(f"Expected positive duration ({duration})")
return obj
if (start := obj.get(start_key)) and (end := obj.get(end_key)):
if start >= end:
raise vol.Invalid(
f"Expected end time to be after start time ({start}, {end})"
)
return obj
return validate
def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all values are of the same type."""
@@ -302,7 +281,6 @@ SERVICE_GET_EVENTS_SCHEMA: Final = vol.All(
),
}
),
_has_positive_interval(EVENT_START_DATETIME, EVENT_END_DATETIME, EVENT_DURATION),
)
@@ -892,7 +870,6 @@ async def async_get_events_service(
end = start + service_call.data[EVENT_DURATION]
else:
end = service_call.data[EVENT_END_DATETIME]
calendar_event_list = await calendar.async_get_events(
calendar.hass, dt_util.as_local(start), dt_util.as_local(end)
)
@@ -8,6 +8,6 @@
"iot_class": "local_push",
"loggers": ["aiostreammagic"],
"quality_scale": "platinum",
"requirements": ["aiostreammagic==2.11.0"],
"requirements": ["aiostreammagic==2.10.0"],
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
}
@@ -104,7 +104,7 @@ class CiscoDeviceScanner(DeviceScanner):
"""Open connection to the router and get arp entries."""
try:
cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="utf-8")
cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
cisco_ssh.login(
self.host,
self.username,
@@ -68,6 +68,7 @@ from .const import ( # noqa: F401
FAN_ON,
FAN_TOP,
HVAC_MODES,
INTENT_GET_TEMPERATURE,
INTENT_SET_TEMPERATURE,
PRESET_ACTIVITY,
PRESET_AWAY,
@@ -126,6 +126,7 @@ DEFAULT_MAX_HUMIDITY = 99
DOMAIN = "climate"
INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"
SERVICE_SET_AUX_HEAT = "set_aux_heat"
+42 -1
View File
@@ -1,4 +1,4 @@
"""Intents for the climate integration."""
"""Intents for the client integration."""
from __future__ import annotations
@@ -11,6 +11,7 @@ from homeassistant.helpers import config_validation as cv, intent
from . import (
ATTR_TEMPERATURE,
DOMAIN,
INTENT_GET_TEMPERATURE,
INTENT_SET_TEMPERATURE,
SERVICE_SET_TEMPERATURE,
ClimateEntityFeature,
@@ -19,9 +20,49 @@ from . import (
async def async_setup_intents(hass: HomeAssistant) -> None:
"""Set up the climate intents."""
intent.async_register(hass, GetTemperatureIntent())
intent.async_register(hass, SetTemperatureIntent())
class GetTemperatureIntent(intent.IntentHandler):
"""Handle GetTemperature intents."""
intent_type = INTENT_GET_TEMPERATURE
description = "Gets the current temperature of a climate device or entity"
slot_schema = {
vol.Optional("area"): intent.non_empty_string,
vol.Optional("name"): intent.non_empty_string,
}
platforms = {DOMAIN}
async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
"""Handle the intent."""
hass = intent_obj.hass
slots = self.async_validate_slots(intent_obj.slots)
name: str | None = None
if "name" in slots:
name = slots["name"]["value"]
area: str | None = None
if "area" in slots:
area = slots["area"]["value"]
match_constraints = intent.MatchTargetsConstraints(
name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
)
match_result = intent.async_match_targets(hass, match_constraints)
if not match_result.is_match:
raise intent.MatchFailedError(
result=match_result, constraints=match_constraints
)
response = intent_obj.create_response()
response.response_type = intent.IntentResponseType.QUERY_ANSWER
response.async_set_states(matched_states=match_result.states)
return response
class SetTemperatureIntent(intent.IntentHandler):
"""Handle SetTemperature intents."""
@@ -6,7 +6,6 @@ import asyncio
from collections.abc import Awaitable, Callable
from datetime import datetime, timedelta
from enum import Enum
import logging
from typing import cast
from hass_nabucasa import Cloud
@@ -20,7 +19,6 @@ from homeassistant.const import (
CONF_NAME,
CONF_REGION,
EVENT_HOMEASSISTANT_STOP,
FORMAT_DATETIME,
Platform,
)
from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback
@@ -35,7 +33,7 @@ from homeassistant.helpers.dispatcher import (
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.loader import bind_hass
from homeassistant.util.signal_type import SignalType
# Pre-import backup to avoid it being imported
@@ -64,13 +62,11 @@ from .const import (
CONF_THINGTALK_SERVER,
CONF_USER_POOL_ID,
DATA_CLOUD,
DATA_CLOUD_LOG_HANDLER,
DATA_PLATFORMS_SETUP,
DOMAIN,
MODE_DEV,
MODE_PROD,
)
from .helpers import FixedSizeQueueLogHandler
from .prefs import CloudPreferences
from .repairs import async_manage_legacy_subscription_issue
from .subscription import async_subscription_info
@@ -249,8 +245,6 @@ def async_remote_ui_url(hass: HomeAssistant) -> str:
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the Home Assistant cloud."""
log_handler = hass.data[DATA_CLOUD_LOG_HANDLER] = await _setup_log_handler(hass)
# Process configs
if DOMAIN in config:
kwargs = dict(config[DOMAIN])
@@ -273,8 +267,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def _shutdown(event: Event) -> None:
"""Shutdown event."""
await cloud.stop()
logging.root.removeHandler(log_handler)
del hass.data[DATA_CLOUD_LOG_HANDLER]
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
@@ -413,19 +405,3 @@ def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None:
async_register_admin_service(
hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
)
async def _setup_log_handler(hass: HomeAssistant) -> FixedSizeQueueLogHandler:
fmt = (
"%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
)
handler = FixedSizeQueueLogHandler()
handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
integration = await async_get_integration(hass, DOMAIN)
loggers: set[str] = {integration.pkg_path, *(integration.loggers or [])}
for logger_name in loggers:
logging.getLogger(logger_name).addHandler(handler)
return handler
@@ -3,20 +3,17 @@
from __future__ import annotations
import asyncio
import base64
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
import hashlib
import logging
import random
from typing import Any
from typing import Any, Literal
from aiohttp import ClientError
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.cloud_api import (
FilesHandlerListEntry,
async_files_delete_file,
async_files_list,
)
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.core import HomeAssistant, callback
@@ -27,11 +24,20 @@ from .client import CloudClient
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
_LOGGER = logging.getLogger(__name__)
_STORAGE_BACKUP: Literal["backup"] = "backup"
_RETRY_LIMIT = 5
_RETRY_SECONDS_MIN = 60
_RETRY_SECONDS_MAX = 600
async def _b64md5(stream: AsyncIterator[bytes]) -> str:
"""Calculate the MD5 hash of a file."""
file_hash = hashlib.md5()
async for chunk in stream:
file_hash.update(chunk)
return base64.b64encode(file_hash.digest()).decode()
async def async_get_backup_agents(
hass: HomeAssistant,
**kwargs: Any,
@@ -80,6 +86,11 @@ class CloudBackupAgent(BackupAgent):
self._cloud = cloud
self._hass = hass
@callback
def _get_backup_filename(self) -> str:
"""Return the backup filename."""
return f"{self._cloud.client.prefs.instance_id}.tar"
async def async_download_backup(
self,
backup_id: str,
@@ -90,13 +101,13 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
if not (backup := await self._async_get_backup(backup_id)):
if not await self.async_get_backup(backup_id):
raise BackupAgentError("Backup not found")
try:
content = await self._cloud.files.download(
storage_type=StorageType.BACKUP,
filename=backup["Key"],
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
)
except CloudError as err:
raise BackupAgentError(f"Failed to download backup: {err}") from err
@@ -118,19 +129,16 @@ class CloudBackupAgent(BackupAgent):
if not backup.protected:
raise BackupAgentError("Cloud backups must be protected")
size = backup.size
try:
base64md5hash = await calculate_b64md5(open_stream, size)
except FilesError as err:
raise BackupAgentError(err) from err
filename = f"{self._cloud.client.prefs.instance_id}.tar"
base64md5hash = await _b64md5(await open_stream())
filename = self._get_backup_filename()
metadata = backup.as_dict()
size = backup.size
tries = 1
while tries <= _RETRY_LIMIT:
try:
await self._cloud.files.upload(
storage_type=StorageType.BACKUP,
storage_type=_STORAGE_BACKUP,
open_stream=open_stream,
filename=filename,
base64md5hash=base64md5hash,
@@ -171,34 +179,27 @@ class CloudBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
if not (backup := await self._async_get_backup(backup_id)):
if not await self.async_get_backup(backup_id):
return
try:
await async_files_delete_file(
self._cloud,
storage_type=StorageType.BACKUP,
filename=backup["Key"],
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to delete backup") from err
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups = await self._async_list_backups()
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
"""List backups."""
try:
backups = await async_files_list(
self._cloud, storage_type=StorageType.BACKUP
)
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
_LOGGER.debug("Cloud backups: %s", backups)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to list backups") from err
_LOGGER.debug("Cloud backups: %s", backups)
return backups
return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
async def async_get_backup(
self,
@@ -206,19 +207,10 @@ class CloudBackupAgent(BackupAgent):
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
if not (backup := await self._async_get_backup(backup_id)):
return None
return AgentBackup.from_dict(backup["Metadata"])
async def _async_get_backup(
self,
backup_id: str,
) -> FilesHandlerListEntry | None:
"""Return a backup."""
backups = await self._async_list_backups()
backups = await self.async_list_backups()
for backup in backups:
if backup["Metadata"]["backup_id"] == backup_id:
if backup.backup_id == backup_id:
return backup
return None
@@ -12,14 +12,12 @@ if TYPE_CHECKING:
from hass_nabucasa import Cloud
from .client import CloudClient
from .helpers import FixedSizeQueueLogHandler
DOMAIN = "cloud"
DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN)
DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey(
"cloud_platforms_setup"
)
DATA_CLOUD_LOG_HANDLER: HassKey[FixedSizeQueueLogHandler] = HassKey("cloud_log_handler")
EVENT_CLOUD_EVENT = "cloud_event"
REQUEST_TIMEOUT = 10
@@ -1,31 +0,0 @@
"""Helpers for the cloud component."""
from collections import deque
import logging
from homeassistant.core import HomeAssistant
class FixedSizeQueueLogHandler(logging.Handler):
"""Log handler to store messages, with auto rotation."""
MAX_RECORDS = 500
def __init__(self) -> None:
"""Initialize a new LogHandler."""
super().__init__()
self._records: deque[logging.LogRecord] = deque(maxlen=self.MAX_RECORDS)
def emit(self, record: logging.LogRecord) -> None:
"""Store log message."""
self._records.append(record)
async def get_logs(self, hass: HomeAssistant) -> list[str]:
"""Get stored logs."""
def _get_logs() -> list[str]:
# copy the queue since it can mutate while iterating
records = self._records.copy()
return [self.format(record) for record in records]
return await hass.async_add_executor_job(_get_logs)
@@ -8,15 +8,14 @@ from contextlib import suppress
import dataclasses
from functools import wraps
from http import HTTPStatus
import json
import logging
import time
from typing import Any, Concatenate, cast
from typing import Any, Concatenate
import aiohttp
from aiohttp import web
import attr
from hass_nabucasa import AlreadyConnectedError, Cloud, auth, thingtalk
from hass_nabucasa import Cloud, auth, thingtalk
from hass_nabucasa.const import STATE_DISCONNECTED
from hass_nabucasa.voice import TTS_VOICES
import voluptuous as vol
@@ -44,7 +43,6 @@ from .assist_pipeline import async_create_cloud_pipeline
from .client import CloudClient
from .const import (
DATA_CLOUD,
DATA_CLOUD_LOG_HANDLER,
EVENT_CLOUD_EVENT,
LOGIN_MFA_TIMEOUT,
PREF_ALEXA_REPORT_STATE,
@@ -65,9 +63,7 @@ from .subscription import async_subscription_info
_LOGGER = logging.getLogger(__name__)
_CLOUD_ERRORS: dict[
type[Exception], tuple[HTTPStatus, Callable[[Exception], str] | str]
] = {
_CLOUD_ERRORS: dict[type[Exception], tuple[HTTPStatus, str]] = {
TimeoutError: (
HTTPStatus.BAD_GATEWAY,
"Unable to reach the Home Assistant cloud.",
@@ -136,10 +132,6 @@ def async_setup(hass: HomeAssistant) -> None:
HTTPStatus.BAD_REQUEST,
"Multi-factor authentication expired, or not started. Please try again.",
),
AlreadyConnectedError: (
HTTPStatus.CONFLICT,
lambda x: json.dumps(cast(AlreadyConnectedError, x).details),
),
}
)
@@ -204,11 +196,7 @@ def _process_cloud_exception(exc: Exception, where: str) -> tuple[HTTPStatus, st
for err, value_info in _CLOUD_ERRORS.items():
if isinstance(exc, err):
status, content = value_info
err_info = (
status,
content if isinstance(content, str) else content(exc),
)
err_info = value_info
break
if err_info is None:
@@ -251,7 +239,6 @@ class CloudLoginView(HomeAssistantView):
vol.All(
{
vol.Required("email"): str,
vol.Optional("check_connection", default=False): bool,
vol.Exclusive("password", "login"): str,
vol.Exclusive("code", "login"): str,
},
@@ -270,11 +257,7 @@ class CloudLoginView(HomeAssistantView):
code = data.get("code")
if email and password:
await cloud.login(
email,
password,
check_connection=data["check_connection"],
)
await cloud.login(email, password)
else:
if (
@@ -286,12 +269,7 @@ class CloudLoginView(HomeAssistantView):
# Voluptuous should ensure that code is not None because password is
assert code is not None
await cloud.login_verify_totp(
email,
code,
self._mfa_tokens,
check_connection=data["check_connection"],
)
await cloud.login_verify_totp(email, code, self._mfa_tokens)
self._mfa_tokens = {}
self._mfa_tokens_set_time = 0
@@ -419,11 +397,8 @@ class DownloadSupportPackageView(HomeAssistantView):
url = "/api/cloud/support_package"
name = "api:cloud:support_package"
async def _generate_markdown(
self,
hass: HomeAssistant,
hass_info: dict[str, Any],
domains_info: dict[str, dict[str, str]],
def _generate_markdown(
self, hass_info: dict[str, Any], domains_info: dict[str, dict[str, str]]
) -> str:
def get_domain_table_markdown(domain_info: dict[str, Any]) -> str:
if len(domain_info) == 0:
@@ -449,17 +424,6 @@ class DownloadSupportPackageView(HomeAssistantView):
"</details>\n\n"
)
log_handler = hass.data[DATA_CLOUD_LOG_HANDLER]
logs = "\n".join(await log_handler.get_logs(hass))
markdown += (
"## Full logs\n\n"
"<details><summary>Logs</summary>\n\n"
"```logs\n"
f"{logs}\n"
"```\n\n"
"</details>\n"
)
return markdown
async def get(self, request: web.Request) -> web.Response:
@@ -469,7 +433,7 @@ class DownloadSupportPackageView(HomeAssistantView):
domain_health = await get_system_health_info(hass)
hass_info = domain_health.pop("homeassistant", {})
markdown = await self._generate_markdown(hass, hass_info, domain_health)
markdown = self._generate_markdown(hass_info, domain_health)
return web.Response(
body=markdown,
@@ -12,7 +12,7 @@
"documentation": "https://www.home-assistant.io/integrations/cloud",
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==0.92.0"],
"loggers": ["hass_nabucasa"],
"requirements": ["hass-nabucasa==0.89.0"],
"single_config_entry": true
}
@@ -3,11 +3,11 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from aioelectricitymaps import (
ElectricityMaps,
ElectricityMapsError,
ElectricityMapsInvalidTokenError,
ElectricityMapsNoDataError,
)
@@ -36,8 +36,6 @@ TYPE_USE_HOME = "use_home_location"
TYPE_SPECIFY_COORDINATES = "specify_coordinates"
TYPE_SPECIFY_COUNTRY = "specify_country_code"
_LOGGER = logging.getLogger(__name__)
class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Co2signal."""
@@ -160,8 +158,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "invalid_auth"
except ElectricityMapsNoDataError:
errors["base"] = "no_data"
except Exception:
_LOGGER.exception("Unexpected error occurred while checking API key")
except ElectricityMapsError:
errors["base"] = "unknown"
else:
if self.source == SOURCE_REAUTH:
@@ -2,12 +2,10 @@
from __future__ import annotations
from collections.abc import Callable
import logging
import re
from typing import Literal
from hassil.recognize import RecognizeResult
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
@@ -243,10 +241,7 @@ async def async_handle_sentence_triggers(
async def async_handle_intents(
hass: HomeAssistant,
user_input: ConversationInput,
*,
intent_filter: Callable[[RecognizeResult], bool] | None = None,
hass: HomeAssistant, user_input: ConversationInput
) -> intent.IntentResponse | None:
"""Try to match input against registered intents and return response.
@@ -255,9 +250,7 @@ async def async_handle_intents(
default_agent = async_get_agent(hass)
assert isinstance(default_agent, DefaultAgent)
return await default_agent.async_handle_intents(
user_input, intent_filter=intent_filter
)
return await default_agent.async_handle_intents(user_input)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -49,11 +49,7 @@ def async_get_chat_log(
raise RuntimeError(
"Cannot attach chat log delta listener unless initial caller"
)
if user_input is not None and (
(content := chat_log.content[-1]).role != "user"
# MyPy doesn't understand that content is a UserContent here
or content.content != user_input.text # type: ignore[union-attr]
):
if user_input is not None:
chat_log.async_add_user_content(UserContent(content=user_input.text))
yield chat_log
@@ -53,7 +53,6 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_added_domain
from homeassistant.util import language as language_util
from homeassistant.util.json import JsonObjectType, json_loads_object
from .chat_log import AssistantContent, async_get_chat_log
@@ -185,6 +184,21 @@ class IntentCache:
self.cache.clear()
def _get_language_variations(language: str) -> Iterable[str]:
"""Generate language codes with and without region."""
yield language
parts = re.split(r"([-_])", language)
if len(parts) == 3:
lang, sep, region = parts
if sep == "_":
# en_US -> en-US
yield f"{lang}-{region}"
# en-US -> en
yield lang
async def async_setup_default_agent(
hass: core.HomeAssistant,
entity_component: EntityComponent[ConversationEntity],
@@ -900,20 +914,26 @@ class DefaultAgent(ConversationEntity):
def _load_intents(self, language: str) -> LanguageIntents | None:
"""Load all intents for language (run inside executor)."""
intents_dict: dict[str, Any] = {}
language_variant: str | None = None
supported_langs = set(get_languages())
# Choose a language variant upfront and commit to it for custom
# sentences, etc.
lang_matches = language_util.matches(language, supported_langs)
all_language_variants = {lang.lower(): lang for lang in supported_langs}
if not lang_matches:
# en-US, en_US, en, ...
for maybe_variant in _get_language_variations(language):
matching_variant = all_language_variants.get(maybe_variant.lower())
if matching_variant:
language_variant = matching_variant
break
if not language_variant:
_LOGGER.warning(
"Unable to find supported language variant for %s", language
)
return None
language_variant = lang_matches[0]
# Load intents for this language variant
lang_variant_intents = get_intents(language_variant, json_load=json_load)
@@ -1309,8 +1329,6 @@ class DefaultAgent(ConversationEntity):
async def async_handle_intents(
self,
user_input: ConversationInput,
*,
intent_filter: Callable[[RecognizeResult], bool] | None = None,
) -> intent.IntentResponse | None:
"""Try to match sentence against registered intents and return response.
@@ -1318,9 +1336,7 @@ class DefaultAgent(ConversationEntity):
Returns None if no match or a matching error occurred.
"""
result = await self.async_recognize_intent(user_input, strict_intents_only=True)
if not isinstance(result, RecognizeResult) or (
intent_filter is not None and intent_filter(result)
):
if not isinstance(result, RecognizeResult):
# No error message on failed match
return None
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
}
@@ -30,15 +30,10 @@ async def async_setup_entry(
async_add_entities(
[
DemoWaterHeater(
"Demo Water Heater", 119, UnitOfTemperature.FAHRENHEIT, False, "eco", 1
"Demo Water Heater", 119, UnitOfTemperature.FAHRENHEIT, False, "eco"
),
DemoWaterHeater(
"Demo Water Heater Celsius",
45,
UnitOfTemperature.CELSIUS,
True,
"eco",
1,
"Demo Water Heater Celsius", 45, UnitOfTemperature.CELSIUS, True, "eco"
),
]
)
@@ -57,7 +52,6 @@ class DemoWaterHeater(WaterHeaterEntity):
unit_of_measurement: str,
away: bool,
current_operation: str,
target_temperature_step: float,
) -> None:
"""Initialize the water_heater device."""
self._attr_name = name
@@ -80,7 +74,6 @@ class DemoWaterHeater(WaterHeaterEntity):
"gas",
"off",
]
self._attr_target_temperature_step = target_temperature_step
def set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperatures."""
