diff --git a/.coveragerc b/.coveragerc index 03bc0c007e1..591e27797a1 100644 --- a/.coveragerc +++ b/.coveragerc @@ -67,9 +67,6 @@ omit = homeassistant/components/android_ip_webcam/switch.py homeassistant/components/anel_pwrctrl/switch.py homeassistant/components/anthemav/media_player.py - homeassistant/components/apcupsd/__init__.py - homeassistant/components/apcupsd/binary_sensor.py - homeassistant/components/apcupsd/sensor.py homeassistant/components/apple_tv/__init__.py homeassistant/components/apple_tv/browse_media.py homeassistant/components/apple_tv/media_player.py @@ -128,6 +125,7 @@ omit = homeassistant/components/blink/binary_sensor.py homeassistant/components/blink/camera.py homeassistant/components/blink/sensor.py + homeassistant/components/blink/switch.py homeassistant/components/blinksticklight/light.py homeassistant/components/blockchain/sensor.py homeassistant/components/bloomsky/* @@ -149,6 +147,7 @@ omit = homeassistant/components/braviatv/coordinator.py homeassistant/components/braviatv/media_player.py homeassistant/components/braviatv/remote.py + homeassistant/components/broadlink/climate.py homeassistant/components/broadlink/light.py homeassistant/components/broadlink/remote.py homeassistant/components/broadlink/switch.py @@ -221,9 +220,6 @@ omit = homeassistant/components/discogs/sensor.py homeassistant/components/discord/__init__.py homeassistant/components/discord/notify.py - homeassistant/components/discovergy/__init__.py - homeassistant/components/discovergy/sensor.py - homeassistant/components/discovergy/coordinator.py homeassistant/components/dlib_face_detect/image_processing.py homeassistant/components/dlib_face_identify/image_processing.py homeassistant/components/dlink/data.py @@ -343,7 +339,6 @@ omit = homeassistant/components/epson/__init__.py homeassistant/components/epson/media_player.py homeassistant/components/epsonworkforce/sensor.py - homeassistant/components/eq3btsmart/climate.py homeassistant/components/escea/__init__.py 
homeassistant/components/escea/climate.py homeassistant/components/escea/discovery.py @@ -374,7 +369,8 @@ omit = homeassistant/components/faa_delays/binary_sensor.py homeassistant/components/faa_delays/coordinator.py homeassistant/components/familyhub/camera.py - homeassistant/components/fastdotcom/* + homeassistant/components/fastdotcom/sensor.py + homeassistant/components/fastdotcom/__init__.py homeassistant/components/ffmpeg/camera.py homeassistant/components/fibaro/__init__.py homeassistant/components/fibaro/binary_sensor.py @@ -413,6 +409,9 @@ omit = homeassistant/components/fjaraskupan/sensor.py homeassistant/components/fleetgo/device_tracker.py homeassistant/components/flexit/climate.py + homeassistant/components/flexit_bacnet/__init__.py + homeassistant/components/flexit_bacnet/const.py + homeassistant/components/flexit_bacnet/climate.py homeassistant/components/flic/binary_sensor.py homeassistant/components/flick_electric/__init__.py homeassistant/components/flick_electric/sensor.py @@ -431,9 +430,7 @@ omit = homeassistant/components/foursquare/* homeassistant/components/free_mobile/notify.py homeassistant/components/freebox/camera.py - homeassistant/components/freebox/device_tracker.py homeassistant/components/freebox/home_base.py - homeassistant/components/freebox/router.py homeassistant/components/freebox/switch.py homeassistant/components/fritz/common.py homeassistant/components/fritz/device_tracker.py @@ -644,8 +641,6 @@ omit = homeassistant/components/kodi/browse_media.py homeassistant/components/kodi/media_player.py homeassistant/components/kodi/notify.py - homeassistant/components/komfovent/__init__.py - homeassistant/components/komfovent/climate.py homeassistant/components/konnected/__init__.py homeassistant/components/konnected/panel.py homeassistant/components/konnected/switch.py @@ -840,6 +835,7 @@ omit = homeassistant/components/noaa_tides/sensor.py homeassistant/components/nobo_hub/__init__.py homeassistant/components/nobo_hub/climate.py + 
homeassistant/components/nobo_hub/select.py homeassistant/components/nobo_hub/sensor.py homeassistant/components/norway_air/air_quality.py homeassistant/components/notify_events/notify.py @@ -940,6 +936,9 @@ omit = homeassistant/components/panasonic_viera/media_player.py homeassistant/components/pandora/media_player.py homeassistant/components/pencom/switch.py + homeassistant/components/permobil/__init__.py + homeassistant/components/permobil/coordinator.py + homeassistant/components/permobil/sensor.py homeassistant/components/philips_js/__init__.py homeassistant/components/philips_js/light.py homeassistant/components/philips_js/media_player.py @@ -953,8 +952,6 @@ omit = homeassistant/components/pilight/light.py homeassistant/components/pilight/switch.py homeassistant/components/ping/__init__.py - homeassistant/components/ping/binary_sensor.py - homeassistant/components/ping/device_tracker.py homeassistant/components/ping/helpers.py homeassistant/components/pioneer/media_player.py homeassistant/components/plaato/__init__.py @@ -1136,10 +1133,7 @@ omit = homeassistant/components/sky_hub/* homeassistant/components/skybeacon/sensor.py homeassistant/components/skybell/__init__.py - homeassistant/components/skybell/binary_sensor.py homeassistant/components/skybell/camera.py - homeassistant/components/skybell/coordinator.py - homeassistant/components/skybell/entity.py homeassistant/components/skybell/light.py homeassistant/components/skybell/sensor.py homeassistant/components/skybell/switch.py @@ -1480,6 +1474,7 @@ omit = homeassistant/components/vicare/button.py homeassistant/components/vicare/climate.py homeassistant/components/vicare/entity.py + homeassistant/components/vicare/number.py homeassistant/components/vicare/sensor.py homeassistant/components/vicare/utils.py homeassistant/components/vicare/water_heater.py diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 27e2d2e5ad0..44a81718e10 100644 --- a/.devcontainer/devcontainer.json 
+++ b/.devcontainer/devcontainer.json @@ -10,6 +10,8 @@ "customizations": { "vscode": { "extensions": [ + "charliermarsh.ruff", + "ms-python.pylint", "ms-python.vscode-pylance", "visualstudioexptteam.vscodeintellicode", "redhat.vscode-yaml", @@ -19,14 +21,6 @@ // Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json "settings": { "python.pythonPath": "/usr/local/bin/python", - "python.linting.enabled": true, - "python.linting.pylintEnabled": true, - "python.formatting.blackPath": "/usr/local/bin/black", - "python.linting.pycodestylePath": "/usr/local/bin/pycodestyle", - "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle", - "python.linting.mypyPath": "/usr/local/bin/mypy", - "python.linting.pylintPath": "/usr/local/bin/pylint", - "python.formatting.provider": "black", "python.testing.pytestArgs": ["--no-cov"], "editor.formatOnPaste": false, "editor.formatOnSave": true, @@ -45,7 +39,10 @@ "!include_dir_list scalar", "!include_dir_merge_list scalar", "!include_dir_merge_named scalar" - ] + ], + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff" + } } } } diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 4bc1442d9e9..d69b1ac0c7d 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -60,7 +60,7 @@ - [ ] There is no commented out code in this PR. - [ ] I have followed the [development checklist][dev-checklist] - [ ] I have followed the [perfect PR recommendations][perfect-pr] -- [ ] The code has been formatted using Black (`black --fast homeassistant tests`) +- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`) - [ ] Tests have been added to verify that the new code works. 
If user exposed functionality or configuration variables are added/changed: diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4b99c3ddc04..b9b9c8babb9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,8 +36,7 @@ env: CACHE_VERSION: 5 PIP_CACHE_VERSION: 4 MYPY_CACHE_VERSION: 6 - BLACK_CACHE_VERSION: 1 - HA_SHORT_VERSION: "2023.12" + HA_SHORT_VERSION: "2024.1" DEFAULT_PYTHON: "3.11" ALL_PYTHON_VERSIONS: "['3.11', '3.12']" # 10.3 is the oldest supported version @@ -58,7 +57,6 @@ env: POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']" PRE_COMMIT_CACHE: ~/.cache/pre-commit PIP_CACHE: /tmp/pip-cache - BLACK_CACHE: /tmp/black-cache SQLALCHEMY_WARN_20: 1 PYTHONASYNCIODEBUG: 1 HASS_CI: 1 @@ -261,8 +259,8 @@ jobs: . venv/bin/activate pre-commit install-hooks - lint-black: - name: Check black + lint-ruff-format: + name: Check ruff-format runs-on: ubuntu-22.04 needs: - info @@ -276,13 +274,6 @@ jobs: with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - - name: Generate partial black restore key - id: generate-black-key - run: | - black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3) - echo "version=$black_version" >> $GITHUB_OUTPUT - echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{ - env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv uses: actions/cache/restore@v3.3.2 @@ -301,33 +292,12 @@ jobs: key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.pre-commit_cache_key }} - - name: Restore black cache - uses: actions/cache@v3.3.2 - with: - path: ${{ env.BLACK_CACHE }} - key: >- - ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ - steps.generate-black-key.outputs.key }} - restore-keys: | - ${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{ - env.BLACK_CACHE_VERSION }}-${{ 
steps.generate-black-key.outputs.version }}-${{ - env.HA_SHORT_VERSION }}- - - name: Run black (fully) - if: needs.info.outputs.test_full_suite == 'true' - env: - BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }} + - name: Run ruff-format run: | . venv/bin/activate - pre-commit run --hook-stage manual black --all-files --show-diff-on-failure - - name: Run black (partially) - if: needs.info.outputs.test_full_suite == 'false' - shell: bash + pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure env: - BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }} - run: | - . venv/bin/activate - shopt -s globstar - pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure + RUFF_OUTPUT_FORMAT: github lint-ruff: name: Check ruff @@ -362,22 +332,12 @@ jobs: key: >- ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.pre-commit_cache_key }} - - name: Register ruff problem matcher - run: | - echo "::add-matcher::.github/workflows/matchers/ruff.json" - - name: Run ruff (fully) - if: needs.info.outputs.test_full_suite == 'true' + - name: Run ruff run: | . venv/bin/activate pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure - - name: Run ruff (partially) - if: needs.info.outputs.test_full_suite == 'false' - shell: bash - run: | - . 
venv/bin/activate - shopt -s globstar - pre-commit run --hook-stage manual ruff --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure - + env: + RUFF_OUTPUT_FORMAT: github lint-other: name: Check other linters runs-on: ubuntu-22.04 @@ -787,7 +747,7 @@ jobs: cov_params+=(--cov-report=xml) fi - python3 -X dev -m pytest \ + python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ --durations=10 \ @@ -824,7 +784,7 @@ jobs: cov_params+=(--cov-report=term-missing) fi - python3 -X dev -m pytest \ + python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ -n auto \ @@ -945,7 +905,7 @@ jobs: cov_params+=(--cov-report=term-missing) fi - python3 -X dev -m pytest \ + python3 -b -X dev -m pytest \ -qq \ --timeout=20 \ -n 1 \ @@ -1069,7 +1029,7 @@ jobs: cov_params+=(--cov-report=term-missing) fi - python3 -X dev -m pytest \ + python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ -n 1 \ diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index ccd2d3c1678..e7d9d4cd901 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -29,11 +29,11 @@ jobs: uses: actions/checkout@v4.1.1 - name: Initialize CodeQL - uses: github/codeql-action/init@v2.22.5 + uses: github/codeql-action/init@v2.22.8 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2.22.5 + uses: github/codeql-action/analyze@v2.22.8 with: category: "/language:python" diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml index 2b5364fa950..fb5deb2958f 100644 --- a/.github/workflows/lock.yml +++ b/.github/workflows/lock.yml @@ -10,7 +10,7 @@ jobs: if: github.repository_owner == 'home-assistant' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v4.0.1 + - uses: dessant/lock-threads@v5.0.1 with: github-token: ${{ github.token }} issue-inactive-days: "30" diff --git a/.github/workflows/matchers/ruff.json b/.github/workflows/matchers/ruff.json deleted file mode 100644 index 
d189a3656a5..00000000000 --- a/.github/workflows/matchers/ruff.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "problemMatcher": [ - { - "owner": "ruff-error", - "severity": "error", - "pattern": [ - { - "regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$", - "file": 1, - "line": 2, - "column": 3, - "message": 4 - } - ] - }, - { - "owner": "ruff-warning", - "severity": "warning", - "pattern": [ - { - "regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$", - "file": 1, - "line": 2, - "column": 3, - "message": 4 - } - ] - } - ] -} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d43bcf1b02..ae135f30407 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,11 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.1 + rev: v0.1.6 hooks: - id: ruff args: - --fix - - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.11.0 - hooks: - - id: black - args: - - --quiet + - id: ruff-format files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$ - repo: https://github.com/codespell-project/codespell rev: v2.2.2 diff --git a/.strict-typing b/.strict-typing index de193888bfe..eeaee4d2f37 100644 --- a/.strict-typing +++ b/.strict-typing @@ -121,6 +121,7 @@ homeassistant.components.energy.* homeassistant.components.esphome.* homeassistant.components.event.* homeassistant.components.evil_genius_labs.* +homeassistant.components.faa_delays.* homeassistant.components.fan.* homeassistant.components.fastdotcom.* homeassistant.components.feedreader.* @@ -128,6 +129,7 @@ homeassistant.components.file_upload.* homeassistant.components.filesize.* homeassistant.components.filter.* homeassistant.components.fitbit.* +homeassistant.components.flexit_bacnet.* homeassistant.components.flux_led.* homeassistant.components.forecast_solar.* homeassistant.components.fritz.* @@ -203,6 +205,7 @@ homeassistant.components.ld2410_ble.* homeassistant.components.lidarr.* homeassistant.components.lifx.* 
homeassistant.components.light.* +homeassistant.components.linear_garage_door.* homeassistant.components.litejet.* homeassistant.components.litterrobot.* homeassistant.components.local_ip.* @@ -264,6 +267,7 @@ homeassistant.components.proximity.* homeassistant.components.prusalink.* homeassistant.components.pure_energie.* homeassistant.components.purpleair.* +homeassistant.components.pushbullet.* homeassistant.components.pvoutput.* homeassistant.components.qnap_qsw.* homeassistant.components.radarr.* diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 951134133e5..8a5d7d486b7 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,3 +1,7 @@ { - "recommendations": ["esbenp.prettier-vscode", "ms-python.python"] + "recommendations": [ + "charliermarsh.ruff", + "esbenp.prettier-vscode", + "ms-python.python" + ] } diff --git a/.vscode/settings.default.json b/.vscode/settings.default.json index 3765d1251b8..e0792a360f1 100644 --- a/.vscode/settings.default.json +++ b/.vscode/settings.default.json @@ -1,6 +1,5 @@ { // Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json - "python.formatting.provider": "black", // Added --no-cov to work around TypeError: message must be set // https://github.com/microsoft/vscode-python/issues/14067 "python.testing.pytestArgs": ["--no-cov"], diff --git a/CODEOWNERS b/CODEOWNERS index 7fceaacef4e..8ffbdfea876 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -153,8 +153,8 @@ build.json @home-assistant/supervisor /homeassistant/components/bizkaibus/ @UgaitzEtxebarria /homeassistant/components/blebox/ @bbx-a @riokuu /tests/components/blebox/ @bbx-a @riokuu -/homeassistant/components/blink/ @fronzbot -/tests/components/blink/ @fronzbot +/homeassistant/components/blink/ @fronzbot @mkmer +/tests/components/blink/ @fronzbot @mkmer /homeassistant/components/bluemaestro/ @bdraco /tests/components/bluemaestro/ @bdraco /homeassistant/components/blueprint/ @home-assistant/core @@ 
-172,8 +172,8 @@ build.json @home-assistant/supervisor /tests/components/bosch_shc/ @tschamm /homeassistant/components/braviatv/ @bieniu @Drafteed /tests/components/braviatv/ @bieniu @Drafteed -/homeassistant/components/broadlink/ @danielhiversen @felipediel @L-I-Am -/tests/components/broadlink/ @danielhiversen @felipediel @L-I-Am +/homeassistant/components/broadlink/ @danielhiversen @felipediel @L-I-Am @eifinger +/tests/components/broadlink/ @danielhiversen @felipediel @L-I-Am @eifinger /homeassistant/components/brother/ @bieniu /tests/components/brother/ @bieniu /homeassistant/components/brottsplatskartan/ @gjohansson-ST @@ -261,6 +261,8 @@ build.json @home-assistant/supervisor /tests/components/denonavr/ @ol-iver @starkillerOG /homeassistant/components/derivative/ @afaucogney /tests/components/derivative/ @afaucogney +/homeassistant/components/devialet/ @fwestenberg +/tests/components/devialet/ @fwestenberg /homeassistant/components/device_automation/ @home-assistant/core /tests/components/device_automation/ @home-assistant/core /homeassistant/components/device_tracker/ @home-assistant/core @@ -309,8 +311,8 @@ build.json @home-assistant/supervisor /tests/components/eafm/ @Jc2k /homeassistant/components/easyenergy/ @klaasnicolaas /tests/components/easyenergy/ @klaasnicolaas -/homeassistant/components/ecobee/ @marthoc @marcolivierarsenault -/tests/components/ecobee/ @marthoc @marcolivierarsenault +/homeassistant/components/ecobee/ @marcolivierarsenault +/tests/components/ecobee/ @marcolivierarsenault /homeassistant/components/ecoforest/ @pjanuario /tests/components/ecoforest/ @pjanuario /homeassistant/components/econet/ @w1ll1am23 @@ -347,17 +349,15 @@ build.json @home-assistant/supervisor /homeassistant/components/enigma2/ @fbradyirl /homeassistant/components/enocean/ @bdurrer /tests/components/enocean/ @bdurrer -/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek -/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek 
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac +/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac /homeassistant/components/entur_public_transport/ @hfurubotten /homeassistant/components/environment_canada/ @gwww @michaeldavie /tests/components/environment_canada/ @gwww @michaeldavie -/homeassistant/components/envisalink/ @ufodone /homeassistant/components/ephember/ @ttroy50 /homeassistant/components/epson/ @pszafer /tests/components/epson/ @pszafer /homeassistant/components/epsonworkforce/ @ThaStealth -/homeassistant/components/eq3btsmart/ @rytilahti /homeassistant/components/escea/ @lazdavila /tests/components/escea/ @lazdavila /homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco @@ -375,7 +375,8 @@ build.json @home-assistant/supervisor /tests/components/faa_delays/ @ntilley905 /homeassistant/components/fan/ @home-assistant/core /tests/components/fan/ @home-assistant/core -/homeassistant/components/fastdotcom/ @rohankapoorcom +/homeassistant/components/fastdotcom/ @rohankapoorcom @erwindouna +/tests/components/fastdotcom/ @rohankapoorcom @erwindouna /homeassistant/components/fibaro/ @rappenze /tests/components/fibaro/ @rappenze /homeassistant/components/file/ @fabaff @@ -396,6 +397,8 @@ build.json @home-assistant/supervisor /tests/components/fivem/ @Sander0542 /homeassistant/components/fjaraskupan/ @elupus /tests/components/fjaraskupan/ @elupus +/homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski +/tests/components/flexit_bacnet/ @lellky @piotrbulinski /homeassistant/components/flick_electric/ @ZephireNZ /tests/components/flick_electric/ @ZephireNZ /homeassistant/components/flipr/ @cnico @@ -492,8 +495,6 @@ build.json @home-assistant/supervisor /tests/components/greeneye_monitor/ @jkeljo /homeassistant/components/group/ @home-assistant/core /tests/components/group/ @home-assistant/core -/homeassistant/components/growatt_server/ @muppet3000 
-/tests/components/growatt_server/ @muppet3000 /homeassistant/components/guardian/ @bachya /tests/components/guardian/ @bachya /homeassistant/components/habitica/ @ASMfreaK @leikoilja @@ -666,8 +667,6 @@ build.json @home-assistant/supervisor /tests/components/knx/ @Julius2342 @farmio @marvin-w /homeassistant/components/kodi/ @OnFreund /tests/components/kodi/ @OnFreund -/homeassistant/components/komfovent/ @ProstoSanja -/tests/components/komfovent/ @ProstoSanja /homeassistant/components/konnected/ @heythisisnate /tests/components/konnected/ @heythisisnate /homeassistant/components/kostal_plenticore/ @stegm @@ -703,6 +702,8 @@ build.json @home-assistant/supervisor /tests/components/life360/ @pnbruckner /homeassistant/components/light/ @home-assistant/core /tests/components/light/ @home-assistant/core +/homeassistant/components/linear_garage_door/ @IceBotYT +/tests/components/linear_garage_door/ @IceBotYT /homeassistant/components/linux_battery/ @fabaff /homeassistant/components/litejet/ @joncar /tests/components/litejet/ @joncar @@ -931,6 +932,8 @@ build.json @home-assistant/supervisor /homeassistant/components/oru/ @bvlaicu /homeassistant/components/otbr/ @home-assistant/core /tests/components/otbr/ @home-assistant/core +/homeassistant/components/ourgroceries/ @OnFreund +/tests/components/ourgroceries/ @OnFreund /homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev /tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev /homeassistant/components/ovo_energy/ @timmo001 @@ -945,6 +948,8 @@ build.json @home-assistant/supervisor /tests/components/peco/ @IceBotYT /homeassistant/components/pegel_online/ @mib1185 /tests/components/pegel_online/ @mib1185 +/homeassistant/components/permobil/ @IsakNyberg +/tests/components/permobil/ @IsakNyberg /homeassistant/components/persistent_notification/ @home-assistant/core /tests/components/persistent_notification/ @home-assistant/core /homeassistant/components/philips_js/ @elupus @@ -981,6 +986,8 @@ 
build.json @home-assistant/supervisor /tests/components/prometheus/ @knyar /homeassistant/components/prosegur/ @dgomes /tests/components/prosegur/ @dgomes +/homeassistant/components/proximity/ @mib1185 +/tests/components/proximity/ @mib1185 /homeassistant/components/proxmoxve/ @jhollowe @Corbeno /homeassistant/components/prusalink/ @balloob /tests/components/prusalink/ @balloob @@ -1054,7 +1061,7 @@ build.json @home-assistant/supervisor /tests/components/reolink/ @starkillerOG /homeassistant/components/repairs/ @home-assistant/core /tests/components/repairs/ @home-assistant/core -/homeassistant/components/repetier/ @MTrab @ShadowBr0ther +/homeassistant/components/repetier/ @ShadowBr0ther /homeassistant/components/rflink/ @javicalle /tests/components/rflink/ @javicalle /homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221 @@ -1235,8 +1242,8 @@ build.json @home-assistant/supervisor /tests/components/stookwijzer/ @fwestenberg /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter /tests/components/stream/ @hunterjm @uvjustin @allenporter -/homeassistant/components/stt/ @home-assistant/core @pvizeli -/tests/components/stt/ @home-assistant/core @pvizeli +/homeassistant/components/stt/ @home-assistant/core +/tests/components/stt/ @home-assistant/core /homeassistant/components/subaru/ @G-Two /tests/components/subaru/ @G-Two /homeassistant/components/suez_water/ @ooii @@ -1321,8 +1328,8 @@ build.json @home-assistant/supervisor /tests/components/tomorrowio/ @raman325 @lymanepp /homeassistant/components/totalconnect/ @austinmroczek /tests/components/totalconnect/ @austinmroczek -/homeassistant/components/tplink/ @rytilahti @thegardenmonkey -/tests/components/tplink/ @rytilahti @thegardenmonkey +/homeassistant/components/tplink/ @rytilahti @thegardenmonkey @bdraco +/tests/components/tplink/ @rytilahti @thegardenmonkey @bdraco /homeassistant/components/tplink_omada/ @MarkGodwin /tests/components/tplink_omada/ @MarkGodwin 
/homeassistant/components/traccar/ @ludeeus @@ -1343,8 +1350,8 @@ build.json @home-assistant/supervisor /tests/components/transmission/ @engrbm87 @JPHutchins /homeassistant/components/trend/ @jpbede /tests/components/trend/ @jpbede -/homeassistant/components/tts/ @home-assistant/core @pvizeli -/tests/components/tts/ @home-assistant/core @pvizeli +/homeassistant/components/tts/ @home-assistant/core +/tests/components/tts/ @home-assistant/core /homeassistant/components/tuya/ @Tuya @zlinoliver @frenck /tests/components/tuya/ @Tuya @zlinoliver @frenck /homeassistant/components/twentemilieu/ @frenck @@ -1393,8 +1400,8 @@ build.json @home-assistant/supervisor /homeassistant/components/versasense/ @imstevenxyz /homeassistant/components/version/ @ludeeus /tests/components/version/ @ludeeus -/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey -/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey +/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja +/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja /homeassistant/components/vicare/ @CFenner /tests/components/vicare/ @CFenner /homeassistant/components/vilfo/ @ManneW diff --git a/Dockerfile b/Dockerfile index b61e1461c52..97eeb5b0dfa 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,6 @@ +# Automatically generated by hassfest. 
+# +# To update, run python3 -m script.hassfest -p docker ARG BUILD_FROM FROM ${BUILD_FROM} diff --git a/Dockerfile.dev b/Dockerfile.dev index 857ccfa3997..a1143adde89 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -5,8 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"] # Uninstall pre-installed formatting and linting tools # They would conflict with our pinned versions RUN \ - pipx uninstall black \ - && pipx uninstall pydocstyle \ + pipx uninstall pydocstyle \ && pipx uninstall pycodestyle \ && pipx uninstall mypy \ && pipx uninstall pylint diff --git a/homeassistant/auth/__init__.py b/homeassistant/auth/__init__.py index 2707f8b6899..000dde90faa 100644 --- a/homeassistant/auth/__init__.py +++ b/homeassistant/auth/__init__.py @@ -280,7 +280,8 @@ class AuthManager: credentials=credentials, name=info.name, is_active=info.is_active, - group_ids=[GROUP_ID_ADMIN], + group_ids=[GROUP_ID_ADMIN if info.group is None else info.group], + local_only=info.local_only, ) self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id}) diff --git a/homeassistant/auth/models.py b/homeassistant/auth/models.py index e604bf9d21c..32a700d65f9 100644 --- a/homeassistant/auth/models.py +++ b/homeassistant/auth/models.py @@ -134,3 +134,5 @@ class UserMeta(NamedTuple): name: str | None is_active: bool + group: str | None = None + local_only: bool | None = None diff --git a/homeassistant/auth/permissions/types.py b/homeassistant/auth/permissions/types.py index 0aa8807211a..cf3632d06d5 100644 --- a/homeassistant/auth/permissions/types.py +++ b/homeassistant/auth/permissions/types.py @@ -5,9 +5,7 @@ from collections.abc import Mapping ValueType = ( # Example: entities.all = { read: true, control: true } - Mapping[str, bool] - | bool - | None + Mapping[str, bool] | bool | None ) # Example: entities.domains = { light: … } diff --git a/homeassistant/auth/providers/command_line.py b/homeassistant/auth/providers/command_line.py index bfe8a2fdddb..4ec2ca18611 100644 --- 
a/homeassistant/auth/providers/command_line.py +++ b/homeassistant/auth/providers/command_line.py @@ -44,7 +44,11 @@ class CommandLineAuthProvider(AuthProvider): DEFAULT_TITLE = "Command Line Authentication" # which keys to accept from a program's stdout - ALLOWED_META_KEYS = ("name",) + ALLOWED_META_KEYS = ( + "name", + "group", + "local_only", + ) def __init__(self, *args: Any, **kwargs: Any) -> None: """Extend parent's __init__. @@ -118,10 +122,15 @@ class CommandLineAuthProvider(AuthProvider): ) -> UserMeta: """Return extra user metadata for credentials. - Currently, only name is supported. + Currently, supports name, group and local_only. """ meta = self._user_meta.get(credentials.data["username"], {}) - return UserMeta(name=meta.get("name"), is_active=True) + return UserMeta( + name=meta.get("name"), + is_active=True, + group=meta.get("group"), + local_only=meta.get("local_only") == "true", + ) class CommandLineLoginFlow(LoginFlow): diff --git a/homeassistant/auth/providers/legacy_api_password.py b/homeassistant/auth/providers/legacy_api_password.py index 0cadbf07589..98c246d74e4 100644 --- a/homeassistant/auth/providers/legacy_api_password.py +++ b/homeassistant/auth/providers/legacy_api_password.py @@ -10,10 +10,11 @@ from typing import Any, cast import voluptuous as vol -from homeassistant.core import callback +from homeassistant.core import async_get_hass, callback from homeassistant.data_entry_flow import FlowResult from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from ..models import Credentials, UserMeta from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow @@ -21,10 +22,28 @@ from . 
import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow AUTH_PROVIDER_TYPE = "legacy_api_password" CONF_API_PASSWORD = "api_password" -CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend( +_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend( {vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA ) + +def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]: + async_create_issue( + async_get_hass(), + "auth", + "deprecated_legacy_api_password", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_legacy_api_password", + ) + + return _CONFIG_SCHEMA(config) # type: ignore[no-any-return] + + +CONFIG_SCHEMA = _create_repair_and_validate + + LEGACY_USER_NAME = "Legacy API password user" diff --git a/homeassistant/auth/providers/trusted_networks.py b/homeassistant/auth/providers/trusted_networks.py index 6962671cb2f..cc195c14c23 100644 --- a/homeassistant/auth/providers/trusted_networks.py +++ b/homeassistant/auth/providers/trusted_networks.py @@ -22,6 +22,7 @@ from homeassistant.core import callback from homeassistant.data_entry_flow import FlowResult from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.network import is_cloud_connection from .. 
import InvalidAuthError from ..models import Credentials, RefreshToken, UserMeta @@ -192,11 +193,8 @@ class TrustedNetworksAuthProvider(AuthProvider): if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies): raise InvalidAuthError("Can't allow access from a proxy server") - if "cloud" in self.hass.config.components: - from hass_nabucasa import remote # pylint: disable=import-outside-toplevel - - if remote.is_cloud_request.get(): - raise InvalidAuthError("Can't allow access from Home Assistant Cloud") + if is_cloud_connection(self.hass): + raise InvalidAuthError("Can't allow access from Home Assistant Cloud") @callback def async_validate_refresh_token( diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index b9bb638e052..0998ac6274c 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -41,6 +41,7 @@ from .setup import ( DATA_SETUP, DATA_SETUP_STARTED, DATA_SETUP_TIME, + async_notify_setup_error, async_set_domains_to_be_loaded, async_setup_component, ) @@ -292,7 +293,8 @@ async def async_from_config_dict( try: await conf_util.async_process_ha_core_config(hass, core_config) except vol.Invalid as config_err: - conf_util.async_log_exception(config_err, "homeassistant", core_config, hass) + conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass) + async_notify_setup_error(hass, core.DOMAIN) return None except HomeAssistantError: _LOGGER.error( diff --git a/homeassistant/brands/eq3.json b/homeassistant/brands/eq3.json index 4052afac277..f5b1c8aeb87 100644 --- a/homeassistant/brands/eq3.json +++ b/homeassistant/brands/eq3.json @@ -1,5 +1,5 @@ { "domain": "eq3", "name": "eQ-3", - "integrations": ["eq3btsmart", "maxcube"] + "integrations": ["maxcube"] } diff --git a/homeassistant/brands/flexit.json b/homeassistant/brands/flexit.json new file mode 100644 index 00000000000..4c61c5eeb07 --- /dev/null +++ b/homeassistant/brands/flexit.json @@ -0,0 +1,5 @@ +{ + "domain": "flexit", + "name": "Flexit", 
+ "integrations": ["flexit", "flexit_bacnet"] +} diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index b74711ccbe6..2974c36607b 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["accuweather"], "quality_scale": "platinum", - "requirements": ["accuweather==2.1.0"] + "requirements": ["accuweather==2.1.1"] } diff --git a/homeassistant/components/adax/manifest.json b/homeassistant/components/adax/manifest.json index 65cffc509d5..2742180333b 100644 --- a/homeassistant/components/adax/manifest.json +++ b/homeassistant/components/adax/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/adax", "iot_class": "local_polling", "loggers": ["adax", "adax_local"], - "requirements": ["adax==0.3.0", "Adax-local==0.1.5"] + "requirements": ["adax==0.4.0", "Adax-local==0.1.5"] } diff --git a/homeassistant/components/adguard/manifest.json b/homeassistant/components/adguard/manifest.json index 24e1283e9df..52add51a663 100644 --- a/homeassistant/components/adguard/manifest.json +++ b/homeassistant/components/adguard/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "local_polling", "loggers": ["adguardhome"], - "requirements": ["adguardhome==0.6.2"] + "requirements": ["adguardhome==0.6.3"] } diff --git a/homeassistant/components/adguard/sensor.py b/homeassistant/components/adguard/sensor.py index 9f1c0a5b0fe..523e1b73e16 100644 --- a/homeassistant/components/adguard/sensor.py +++ b/homeassistant/components/adguard/sensor.py @@ -22,20 +22,13 @@ SCAN_INTERVAL = timedelta(seconds=300) PARALLEL_UPDATES = 4 -@dataclass -class AdGuardHomeEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class AdGuardHomeEntityDescription(SensorEntityDescription): + """Describes AdGuard Home sensor entity.""" value_fn: 
Callable[[AdGuardHome], Coroutine[Any, Any, int | float]] -@dataclass -class AdGuardHomeEntityDescription( - SensorEntityDescription, AdGuardHomeEntityDescriptionMixin -): - """Describes AdGuard Home sensor entity.""" - - SENSORS: tuple[AdGuardHomeEntityDescription, ...] = ( AdGuardHomeEntityDescription( key="dns_queries", diff --git a/homeassistant/components/adguard/strings.json b/homeassistant/components/adguard/strings.json index e34a7c88229..5b6a5a546f7 100644 --- a/homeassistant/components/adguard/strings.json +++ b/homeassistant/components/adguard/strings.json @@ -10,6 +10,9 @@ "username": "[%key:common::config_flow::data::username%]", "ssl": "[%key:common::config_flow::data::ssl%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of the device running your AdGuard Home." } }, "hassio_confirm": { diff --git a/homeassistant/components/adguard/switch.py b/homeassistant/components/adguard/switch.py index 1020e8690f1..944a3c7b269 100644 --- a/homeassistant/components/adguard/switch.py +++ b/homeassistant/components/adguard/switch.py @@ -21,22 +21,15 @@ SCAN_INTERVAL = timedelta(seconds=10) PARALLEL_UPDATES = 1 -@dataclass -class AdGuardHomeSwitchEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class AdGuardHomeSwitchEntityDescription(SwitchEntityDescription): + """Describes AdGuard Home switch entity.""" is_on_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, bool]]] turn_on_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, None]]] turn_off_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, None]]] -@dataclass -class AdGuardHomeSwitchEntityDescription( - SwitchEntityDescription, AdGuardHomeSwitchEntityDescriptionMixin -): - """Describes AdGuard Home switch entity.""" - - SWITCHES: tuple[AdGuardHomeSwitchEntityDescription, ...] 
= ( AdGuardHomeSwitchEntityDescription( key="protection", diff --git a/homeassistant/components/agent_dvr/strings.json b/homeassistant/components/agent_dvr/strings.json index 77167b8294b..cbfc2e87a4d 100644 --- a/homeassistant/components/agent_dvr/strings.json +++ b/homeassistant/components/agent_dvr/strings.json @@ -6,6 +6,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The IP address of the Agent DVR server." } } }, diff --git a/homeassistant/components/airq/const.py b/homeassistant/components/airq/const.py index 82719515cbf..d1a2340b4bc 100644 --- a/homeassistant/components/airq/const.py +++ b/homeassistant/components/airq/const.py @@ -3,7 +3,6 @@ from typing import Final DOMAIN: Final = "airq" MANUFACTURER: Final = "CorantGmbH" -TARGET_ROUTE: Final = "average" CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³" ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³" UPDATE_INTERVAL: float = 10.0 diff --git a/homeassistant/components/airq/coordinator.py b/homeassistant/components/airq/coordinator.py index 2d0d9d199df..76459005c45 100644 --- a/homeassistant/components/airq/coordinator.py +++ b/homeassistant/components/airq/coordinator.py @@ -13,7 +13,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL +from .const import DOMAIN, MANUFACTURER, UPDATE_INTERVAL _LOGGER = logging.getLogger(__name__) @@ -56,6 +56,4 @@ class AirQCoordinator(DataUpdateCoordinator): hw_version=info["hw_version"], ) ) - - data = await self.airq.get(TARGET_ROUTE) - return self.airq.drop_uncertainties_from_data(data) + return await self.airq.get_latest_data() diff --git a/homeassistant/components/airq/manifest.json 
b/homeassistant/components/airq/manifest.json index 97fb70c1b05..156f167913b 100644 --- a/homeassistant/components/airq/manifest.json +++ b/homeassistant/components/airq/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aioairq"], - "requirements": ["aioairq==0.2.4"] + "requirements": ["aioairq==0.3.1"] } diff --git a/homeassistant/components/airtouch4/strings.json b/homeassistant/components/airtouch4/strings.json index 240b3e0007c..04c2e54cc7e 100644 --- a/homeassistant/components/airtouch4/strings.json +++ b/homeassistant/components/airtouch4/strings.json @@ -12,6 +12,9 @@ "title": "Set up your AirTouch 4 connection details.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your AirTouch controller." } } } diff --git a/homeassistant/components/airvisual_pro/strings.json b/homeassistant/components/airvisual_pro/strings.json index b5c68371fdf..641fa8963da 100644 --- a/homeassistant/components/airvisual_pro/strings.json +++ b/homeassistant/components/airvisual_pro/strings.json @@ -12,6 +12,9 @@ "data": { "ip_address": "[%key:common::config_flow::data::host%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "ip_address": "The hostname or IP address of your AirVisual Pro device." 
} } }, diff --git a/homeassistant/components/alarmdecoder/strings.json b/homeassistant/components/alarmdecoder/strings.json index d7ac882bb82..dd698201b09 100644 --- a/homeassistant/components/alarmdecoder/strings.json +++ b/homeassistant/components/alarmdecoder/strings.json @@ -14,6 +14,10 @@ "port": "[%key:common::config_flow::data::port%]", "device_baudrate": "Device Baud Rate", "device_path": "Device Path" + }, + "data_description": { + "host": "The hostname or IP address of the AlarmDecoder device that is connected to your alarm panel.", + "port": "The port on which AlarmDecoder is accessible (for example, 10000)" } } }, diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index cde90e127f3..0856c39946b 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -857,16 +857,18 @@ class AlexaInputController(AlexaCapability): def inputs(self) -> list[dict[str, str]] | None: """Return the list of valid supported inputs.""" - source_list: list[str] = self.entity.attributes.get( + source_list: list[Any] = self.entity.attributes.get( media_player.ATTR_INPUT_SOURCE_LIST, [] ) return AlexaInputController.get_valid_inputs(source_list) @staticmethod - def get_valid_inputs(source_list: list[str]) -> list[dict[str, str]]: + def get_valid_inputs(source_list: list[Any]) -> list[dict[str, str]]: """Return list of supported inputs.""" input_list: list[dict[str, str]] = [] for source in source_list: + if not isinstance(source, str): + continue formatted_source = ( source.lower().replace("-", "").replace("_", "").replace(" ", "") ) diff --git a/homeassistant/components/android_ip_webcam/strings.json b/homeassistant/components/android_ip_webcam/strings.json index db21a690984..57e5452b900 100644 --- a/homeassistant/components/android_ip_webcam/strings.json +++ b/homeassistant/components/android_ip_webcam/strings.json @@ -7,6 +7,9 @@ "port": 
"[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The IP address of the device running the Android IP Webcam app. The IP address is shown in the app once you start the server." } } }, diff --git a/homeassistant/components/apcupsd/__init__.py b/homeassistant/components/apcupsd/__init__.py index 8d7c6b2f46d..550e1014d2a 100644 --- a/homeassistant/components/apcupsd/__init__.py +++ b/homeassistant/components/apcupsd/__init__.py @@ -1,44 +1,34 @@ """Support for APCUPSd via its Network Information Server (NIS).""" from __future__ import annotations -from datetime import timedelta import logging -from typing import Any, Final - -from apcaccess import status +from typing import Final from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.util import Throttle + +from .const import DOMAIN +from .coordinator import APCUPSdCoordinator _LOGGER = logging.getLogger(__name__) -DOMAIN: Final = "apcupsd" -VALUE_ONLINE: Final = 8 PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR) -MIN_TIME_BETWEEN_UPDATES: Final = timedelta(seconds=60) CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Use config values to set up a function enabling status retrieval.""" - data_service = APCUPSdData( - config_entry.data[CONF_HOST], config_entry.data[CONF_PORT] - ) + host, port = config_entry.data[CONF_HOST], config_entry.data[CONF_PORT] + coordinator = APCUPSdCoordinator(hass, host, port) - try: - await hass.async_add_executor_job(data_service.update) - except OSError as ex: - _LOGGER.error("Failure while testing 
APCUPSd status retrieval: %s", ex) - return False + await coordinator.async_config_entry_first_refresh() - # Store the data service object. + # Store the coordinator for later uses. hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][config_entry.entry_id] = data_service + hass.data[DOMAIN][config_entry.entry_id] = coordinator # Forward the config entries to the supported platforms. await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) @@ -51,66 +41,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if unload_ok and DOMAIN in hass.data: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -class APCUPSdData: - """Stores the data retrieved from APCUPSd. - - For each entity to use, acts as the single point responsible for fetching - updates from the server. - """ - - def __init__(self, host: str, port: int) -> None: - """Initialize the data object.""" - self._host = host - self._port = port - self.status: dict[str, str] = {} - - @property - def name(self) -> str | None: - """Return the name of the UPS, if available.""" - return self.status.get("UPSNAME") - - @property - def model(self) -> str | None: - """Return the model of the UPS, if available.""" - # Different UPS models may report slightly different keys for model, here we - # try them all. 
- for model_key in ("APCMODEL", "MODEL"): - if model_key in self.status: - return self.status[model_key] - return None - - @property - def serial_no(self) -> str | None: - """Return the unique serial number of the UPS, if available.""" - return self.status.get("SERIALNO") - - @property - def statflag(self) -> str | None: - """Return the STATFLAG indicating the status of the UPS, if available.""" - return self.status.get("STATFLAG") - - @property - def device_info(self) -> DeviceInfo | None: - """Return the DeviceInfo of this APC UPS for the sensors, if serial number is available.""" - if self.serial_no is None: - return None - - return DeviceInfo( - identifiers={(DOMAIN, self.serial_no)}, - model=self.model, - manufacturer="APC", - name=self.name if self.name is not None else "APC UPS", - hw_version=self.status.get("FIRMWARE"), - sw_version=self.status.get("VERSION"), - ) - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - def update(self, **kwargs: Any) -> None: - """Fetch the latest status from APCUPSd. - - Note that the result dict uses upper case for each resource, where our - integration uses lower cases as keys internally. - """ - self.status = status.parse(status.get(host=self._host, port=self._port)) diff --git a/homeassistant/components/apcupsd/binary_sensor.py b/homeassistant/components/apcupsd/binary_sensor.py index bac8d18d58b..76e88689ca5 100644 --- a/homeassistant/components/apcupsd/binary_sensor.py +++ b/homeassistant/components/apcupsd/binary_sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations import logging +from typing import Final from homeassistant.components.binary_sensor import ( BinarySensorEntity, @@ -10,8 +11,9 @@ from homeassistant.components.binary_sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import DOMAIN, VALUE_ONLINE, APCUPSdData +from . import DOMAIN, APCUPSdCoordinator _LOGGER = logging.getLogger(__name__) _DESCRIPTION = BinarySensorEntityDescription( @@ -19,6 +21,8 @@ _DESCRIPTION = BinarySensorEntityDescription( name="UPS Online Status", icon="mdi:heart", ) +# The bit in STATFLAG that indicates the online status of the APC UPS. +_VALUE_ONLINE_MASK: Final = 0b1000 async def async_setup_entry( @@ -27,50 +31,36 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up an APCUPSd Online Status binary sensor.""" - data_service: APCUPSdData = hass.data[DOMAIN][config_entry.entry_id] + coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] # Do not create the binary sensor if APCUPSd does not provide STATFLAG field for us # to determine the online status. - if data_service.statflag is None: + if _DESCRIPTION.key.upper() not in coordinator.data: return - async_add_entities( - [OnlineStatus(data_service, _DESCRIPTION)], - update_before_add=True, - ) + async_add_entities([OnlineStatus(coordinator, _DESCRIPTION)]) -class OnlineStatus(BinarySensorEntity): +class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity): """Representation of a UPS online status.""" def __init__( self, - data_service: APCUPSdData, + coordinator: APCUPSdCoordinator, description: BinarySensorEntityDescription, ) -> None: """Initialize the APCUPSd binary device.""" + super().__init__(coordinator, context=description.key.upper()) + # Set up unique id and device info if serial number is available. 
- if (serial_no := data_service.serial_no) is not None: + if (serial_no := coordinator.ups_serial_no) is not None: self._attr_unique_id = f"{serial_no}_{description.key}" - self._attr_device_info = data_service.device_info - self.entity_description = description - self._data_service = data_service + self._attr_device_info = coordinator.device_info - def update(self) -> None: - """Get the status report from APCUPSd and set this entity's state.""" - try: - self._data_service.update() - except OSError as ex: - if self._attr_available: - self._attr_available = False - _LOGGER.exception("Got exception while fetching state: %s", ex) - return - - self._attr_available = True + @property + def is_on(self) -> bool | None: + """Returns true if the UPS is online.""" + # Check if ONLINE bit is set in STATFLAG. key = self.entity_description.key.upper() - if key not in self._data_service.status: - self._attr_is_on = None - return - - self._attr_is_on = int(self._data_service.status[key], 16) & VALUE_ONLINE > 0 + return int(self.coordinator.data[key], 16) & _VALUE_ONLINE_MASK != 0 diff --git a/homeassistant/components/apcupsd/config_flow.py b/homeassistant/components/apcupsd/config_flow.py index f1ce20694c7..57002d7a2b2 100644 --- a/homeassistant/components/apcupsd/config_flow.py +++ b/homeassistant/components/apcupsd/config_flow.py @@ -1,6 +1,7 @@ """Config flow for APCUPSd integration.""" from __future__ import annotations +import asyncio from typing import Any import voluptuous as vol @@ -10,8 +11,9 @@ from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import selector import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.update_coordinator import UpdateFailed -from . import DOMAIN, APCUPSdData +from . 
import DOMAIN, APCUPSdCoordinator _PORT_SELECTOR = vol.All( selector.NumberSelector( @@ -43,36 +45,37 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is None: return self.async_show_form(step_id="user", data_schema=_SCHEMA) + host, port = user_input[CONF_HOST], user_input[CONF_PORT] + # Abort if an entry with same host and port is present. - self._async_abort_entries_match( - {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]} - ) + self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port}) # Test the connection to the host and get the current status for serial number. - data_service = APCUPSdData(user_input[CONF_HOST], user_input[CONF_PORT]) - try: - await self.hass.async_add_executor_job(data_service.update) - except OSError: + coordinator = APCUPSdCoordinator(self.hass, host, port) + + await coordinator.async_request_refresh() + await self.hass.async_block_till_done() + if isinstance(coordinator.last_exception, (UpdateFailed, asyncio.TimeoutError)): errors = {"base": "cannot_connect"} return self.async_show_form( step_id="user", data_schema=_SCHEMA, errors=errors ) - if not data_service.status: + if not coordinator.data: return self.async_abort(reason="no_status") # We _try_ to use the serial number of the UPS as the unique id since this field # is not guaranteed to exist on all APC UPS models. 
- await self.async_set_unique_id(data_service.serial_no) + await self.async_set_unique_id(coordinator.ups_serial_no) self._abort_if_unique_id_configured() title = "APC UPS" - if data_service.name is not None: - title = data_service.name - elif data_service.model is not None: - title = data_service.model - elif data_service.serial_no is not None: - title = data_service.serial_no + if coordinator.ups_name is not None: + title = coordinator.ups_name + elif coordinator.ups_model is not None: + title = coordinator.ups_model + elif coordinator.ups_serial_no is not None: + title = coordinator.ups_serial_no return self.async_create_entry( title=title, diff --git a/homeassistant/components/apcupsd/const.py b/homeassistant/components/apcupsd/const.py new file mode 100644 index 00000000000..cacc9e29369 --- /dev/null +++ b/homeassistant/components/apcupsd/const.py @@ -0,0 +1,4 @@ +"""Constants for APCUPSd component.""" +from typing import Final + +DOMAIN: Final = "apcupsd" diff --git a/homeassistant/components/apcupsd/coordinator.py b/homeassistant/components/apcupsd/coordinator.py new file mode 100644 index 00000000000..ae4c94a9382 --- /dev/null +++ b/homeassistant/components/apcupsd/coordinator.py @@ -0,0 +1,102 @@ +"""Support for APCUPSd via its Network Information Server (NIS).""" +from __future__ import annotations + +import asyncio +from collections import OrderedDict +from datetime import timedelta +import logging +from typing import Final + +from apcaccess import status + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.debounce import Debouncer +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import ( + REQUEST_REFRESH_DEFAULT_IMMEDIATE, + DataUpdateCoordinator, + UpdateFailed, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) +UPDATE_INTERVAL: Final = timedelta(seconds=60) +REQUEST_REFRESH_COOLDOWN: Final = 
5 + + +class APCUPSdCoordinator(DataUpdateCoordinator[OrderedDict[str, str]]): + """Store and coordinate the data retrieved from APCUPSd for all sensors. + + For each entity to use, acts as the single point responsible for fetching + updates from the server. + """ + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, host: str, port: int) -> None: + """Initialize the data object.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=UPDATE_INTERVAL, + request_refresh_debouncer=Debouncer( + hass, + _LOGGER, + cooldown=REQUEST_REFRESH_COOLDOWN, + immediate=REQUEST_REFRESH_DEFAULT_IMMEDIATE, + ), + ) + self._host = host + self._port = port + + @property + def ups_name(self) -> str | None: + """Return the name of the UPS, if available.""" + return self.data.get("UPSNAME") + + @property + def ups_model(self) -> str | None: + """Return the model of the UPS, if available.""" + # Different UPS models may report slightly different keys for model, here we + # try them all. + for model_key in ("APCMODEL", "MODEL"): + if model_key in self.data: + return self.data[model_key] + return None + + @property + def ups_serial_no(self) -> str | None: + """Return the unique serial number of the UPS, if available.""" + return self.data.get("SERIALNO") + + @property + def device_info(self) -> DeviceInfo: + """Return the DeviceInfo of this APC UPS, if serial number is available.""" + return DeviceInfo( + identifiers={(DOMAIN, self.ups_serial_no or self.config_entry.entry_id)}, + model=self.ups_model, + manufacturer="APC", + name=self.ups_name if self.ups_name else "APC UPS", + hw_version=self.data.get("FIRMWARE"), + sw_version=self.data.get("VERSION"), + ) + + async def _async_update_data(self) -> OrderedDict[str, str]: + """Fetch the latest status from APCUPSd. + + Note that the result dict uses upper case for each resource, where our + integration uses lower cases as keys internally. 
+ """ + + async with asyncio.timeout(10): + try: + raw = await self.hass.async_add_executor_job( + status.get, self._host, self._port + ) + result: OrderedDict[str, str] = status.parse(raw) + return result + except OSError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/apcupsd/manifest.json b/homeassistant/components/apcupsd/manifest.json index cd7e2a116b3..55b66f0c0a0 100644 --- a/homeassistant/components/apcupsd/manifest.json +++ b/homeassistant/components/apcupsd/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/apcupsd", "iot_class": "local_polling", "loggers": ["apcaccess"], + "quality_scale": "silver", "requirements": ["apcaccess==0.0.13"] } diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index 745be7e2d63..71dc9940b72 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -20,10 +20,11 @@ from homeassistant.const import ( UnitOfTemperature, UnitOfTime, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DOMAIN, APCUPSdData +from . import DOMAIN, APCUPSdCoordinator _LOGGER = logging.getLogger(__name__) @@ -452,11 +453,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the APCUPSd sensors from config entries.""" - data_service: APCUPSdData = hass.data[DOMAIN][config_entry.entry_id] + coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] - # The resources from data service are in upper-case by default, but we use - # lower cases throughout this integration. 
- available_resources: set[str] = {k.lower() for k, _ in data_service.status.items()} + # The resource keys in the data dict collected in the coordinator is in upper-case + # by default, but we use lower cases throughout this integration. + available_resources: set[str] = {k.lower() for k, _ in coordinator.data.items()} entities = [] for resource in available_resources: @@ -464,9 +465,9 @@ async def async_setup_entry( _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) continue - entities.append(APCUPSdSensor(data_service, SENSORS[resource])) + entities.append(APCUPSdSensor(coordinator, SENSORS[resource])) - async_add_entities(entities, update_before_add=True) + async_add_entities(entities) def infer_unit(value: str) -> tuple[str, str | None]: @@ -483,41 +484,36 @@ def infer_unit(value: str) -> tuple[str, str | None]: return value, None -class APCUPSdSensor(SensorEntity): +class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity): """Representation of a sensor entity for APCUPSd status values.""" def __init__( self, - data_service: APCUPSdData, + coordinator: APCUPSdCoordinator, description: SensorEntityDescription, ) -> None: """Initialize the sensor.""" + super().__init__(coordinator=coordinator, context=description.key.upper()) + # Set up unique id and device info if serial number is available. 
- if (serial_no := data_service.serial_no) is not None: + if (serial_no := coordinator.ups_serial_no) is not None: self._attr_unique_id = f"{serial_no}_{description.key}" - self._attr_device_info = data_service.device_info self.entity_description = description - self._data_service = data_service + self._attr_device_info = coordinator.device_info - def update(self) -> None: - """Get the latest status and use it to update our sensor state.""" - try: - self._data_service.update() - except OSError as ex: - if self._attr_available: - self._attr_available = False - _LOGGER.exception("Got exception while fetching state: %s", ex) - return + # Initial update of attributes. + self._update_attrs() - self._attr_available = True + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attrs() + self.async_write_ha_state() + + def _update_attrs(self) -> None: + """Update sensor attributes based on coordinator data.""" key = self.entity_description.key.upper() - if key not in self._data_service.status: - self._attr_native_value = None - return - - self._attr_native_value, inferred_unit = infer_unit( - self._data_service.status[key] - ) + self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key]) if not self.native_unit_of_measurement: self._attr_native_unit_of_measurement = inferred_unit diff --git a/homeassistant/components/api/__init__.py b/homeassistant/components/api/__init__.py index 6bb3cc34050..057e85613fd 100644 --- a/homeassistant/components/api/__init__.py +++ b/homeassistant/components/api/__init__.py @@ -41,7 +41,6 @@ from homeassistant.exceptions import ( Unauthorized, ) from homeassistant.helpers import config_validation as cv, template -from homeassistant.helpers.aiohttp_compat import enable_compression from homeassistant.helpers.event import EventStateChangedData from homeassistant.helpers.json import json_dumps from homeassistant.helpers.service import 
async_get_all_descriptions @@ -218,9 +217,11 @@ class APIStatesView(HomeAssistantView): if entity_perm(state.entity_id, "read") ) response = web.Response( - body=f'[{",".join(states)}]', content_type=CONTENT_TYPE_JSON + body=f'[{",".join(states)}]', + content_type=CONTENT_TYPE_JSON, + zlib_executor_size=32768, ) - enable_compression(response) + response.enable_compression() return response @@ -390,17 +391,14 @@ class APIDomainServicesView(HomeAssistantView): ) try: - async with timeout(SERVICE_WAIT_TIMEOUT): - # shield the service call from cancellation on connection drop - await shield( - hass.services.async_call( - domain, service, data, blocking=True, context=context - ) + # shield the service call from cancellation on connection drop + await shield( + hass.services.async_call( + domain, service, data, blocking=True, context=context ) + ) except (vol.Invalid, ServiceNotFound) as ex: raise HTTPBadRequest() from ex - except TimeoutError: - pass finally: cancel_listen() diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index 64fe9e1f5f4..6d00f26ee15 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -9,7 +9,13 @@ from homeassistant.components import stt from homeassistant.core import Context, HomeAssistant from homeassistant.helpers.typing import ConfigType -from .const import CONF_DEBUG_RECORDING_DIR, DATA_CONFIG, DATA_LAST_WAKE_UP, DOMAIN +from .const import ( + CONF_DEBUG_RECORDING_DIR, + DATA_CONFIG, + DATA_LAST_WAKE_UP, + DOMAIN, + EVENT_RECORDING, +) from .error import PipelineNotFound from .pipeline import ( AudioSettings, @@ -40,6 +46,7 @@ __all__ = ( "PipelineEventType", "PipelineNotFound", "WakeWordSettings", + "EVENT_RECORDING", ) CONFIG_SCHEMA = vol.Schema( diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index 84b49fc18fa..091b19db69e 100644 
--- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -11,3 +11,5 @@ CONF_DEBUG_RECORDING_DIR = "debug_recording_dir" DATA_LAST_WAKE_UP = f"{DOMAIN}.last_wake_up" DEFAULT_WAKE_WORD_COOLDOWN = 2 # seconds + +EVENT_RECORDING = f"{DOMAIN}_recording" diff --git a/homeassistant/components/assist_pipeline/logbook.py b/homeassistant/components/assist_pipeline/logbook.py new file mode 100644 index 00000000000..0c00c57adb9 --- /dev/null +++ b/homeassistant/components/assist_pipeline/logbook.py @@ -0,0 +1,39 @@ +"""Describe assist_pipeline logbook events.""" +from __future__ import annotations + +from collections.abc import Callable + +from homeassistant.components.logbook import LOGBOOK_ENTRY_MESSAGE, LOGBOOK_ENTRY_NAME +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import Event, HomeAssistant, callback +import homeassistant.helpers.device_registry as dr + +from .const import DOMAIN, EVENT_RECORDING + + +@callback +def async_describe_events( + hass: HomeAssistant, + async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None], +) -> None: + """Describe logbook events.""" + device_registry = dr.async_get(hass) + + @callback + def async_describe_logbook_event(event: Event) -> dict[str, str]: + """Describe logbook event.""" + device: dr.DeviceEntry | None = None + device_name: str = "Unknown device" + + device = device_registry.devices[event.data[ATTR_DEVICE_ID]] + if device: + device_name = device.name_by_user or device.name or "Unknown device" + + message = f"{device_name} captured an audio sample" + + return { + LOGBOOK_ENTRY_NAME: device_name, + LOGBOOK_ENTRY_MESSAGE: message, + } + + async_describe_event(DOMAIN, EVENT_RECORDING, async_describe_logbook_event) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index c6d0f6c5435..4f2a9a8d99b 100644 --- 
a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -320,7 +320,7 @@ class Pipeline: wake_word_entity: str | None wake_word_id: str | None - id: str = field(default_factory=ulid_util.ulid) + id: str = field(default_factory=ulid_util.ulid_now) @classmethod def from_json(cls, data: dict[str, Any]) -> Pipeline: @@ -482,7 +482,7 @@ class PipelineRun: wake_word_settings: WakeWordSettings | None = None audio_settings: AudioSettings = field(default_factory=AudioSettings) - id: str = field(default_factory=ulid_util.ulid) + id: str = field(default_factory=ulid_util.ulid_now) stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False) tts_engine: str = field(init=False, repr=False) tts_options: dict | None = field(init=False, default=None) @@ -503,6 +503,9 @@ class PipelineRun: audio_processor_buffer: AudioBuffer = field(init=False, repr=False) """Buffer used when splitting audio into chunks for audio processing""" + _device_id: str | None = None + """Optional device id set during run start.""" + def __post_init__(self) -> None: """Set language for pipeline.""" self.language = self.pipeline.language or self.hass.config.language @@ -554,7 +557,8 @@ class PipelineRun: def start(self, device_id: str | None) -> None: """Emit run start event.""" - self._start_debug_recording_thread(device_id) + self._device_id = device_id + self._start_debug_recording_thread() data = { "pipeline": self.pipeline.id, @@ -567,6 +571,9 @@ class PipelineRun: async def end(self) -> None: """Emit run end event.""" + # Signal end of stream to listeners + self._capture_chunk(None) + # Stop the recording thread before emitting run-end. # This ensures that files are properly closed if the event handler reads them. 
await self._stop_debug_recording_thread() @@ -746,9 +753,7 @@ class PipelineRun: if self.abort_wake_word_detection: raise WakeWordDetectionAborted - if self.debug_recording_queue is not None: - self.debug_recording_queue.put_nowait(chunk.audio) - + self._capture_chunk(chunk.audio) yield chunk.audio, chunk.timestamp_ms # Wake-word-detection occurs *after* the wake word was actually @@ -870,8 +875,7 @@ class PipelineRun: chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate sent_vad_start = False async for chunk in audio_stream: - if self.debug_recording_queue is not None: - self.debug_recording_queue.put_nowait(chunk.audio) + self._capture_chunk(chunk.audio) if stt_vad is not None: if not stt_vad.process(chunk_seconds, chunk.is_speech): @@ -1020,44 +1024,64 @@ class PipelineRun: ) ) - try: - # Synthesize audio and get URL - tts_media_id = tts_generate_media_source_id( - self.hass, - tts_input, - engine=self.tts_engine, - language=self.pipeline.tts_language, - options=self.tts_options, - ) - tts_media = await media_source.async_resolve_media( - self.hass, - tts_media_id, - None, - ) - except Exception as src_error: - _LOGGER.exception("Unexpected error during text-to-speech") - raise TextToSpeechError( - code="tts-failed", - message="Unexpected error during text-to-speech", - ) from src_error + if tts_input := tts_input.strip(): + try: + # Synthesize audio and get URL + tts_media_id = tts_generate_media_source_id( + self.hass, + tts_input, + engine=self.tts_engine, + language=self.pipeline.tts_language, + options=self.tts_options, + ) + tts_media = await media_source.async_resolve_media( + self.hass, + tts_media_id, + None, + ) + except Exception as src_error: + _LOGGER.exception("Unexpected error during text-to-speech") + raise TextToSpeechError( + code="tts-failed", + message="Unexpected error during text-to-speech", + ) from src_error - _LOGGER.debug("TTS result %s", tts_media) + _LOGGER.debug("TTS result %s", tts_media) + tts_output = { + "media_id": 
tts_media_id, + **asdict(tts_media), + } + else: + tts_output = {} self.process_event( - PipelineEvent( - PipelineEventType.TTS_END, - { - "tts_output": { - "media_id": tts_media_id, - **asdict(tts_media), - } - }, - ) + PipelineEvent(PipelineEventType.TTS_END, {"tts_output": tts_output}) ) return tts_media.url - def _start_debug_recording_thread(self, device_id: str | None) -> None: + def _capture_chunk(self, audio_bytes: bytes | None) -> None: + """Forward audio chunk to various capturing mechanisms.""" + if self.debug_recording_queue is not None: + # Forward to debug WAV file recording + self.debug_recording_queue.put_nowait(audio_bytes) + + if self._device_id is None: + return + + # Forward to device audio capture + pipeline_data: PipelineData = self.hass.data[DOMAIN] + audio_queue = pipeline_data.device_audio_queues.get(self._device_id) + if audio_queue is None: + return + + try: + audio_queue.queue.put_nowait(audio_bytes) + except asyncio.QueueFull: + audio_queue.overflow = True + _LOGGER.warning("Audio queue full for device %s", self._device_id) + + def _start_debug_recording_thread(self) -> None: """Start thread to record wake/stt audio if debug_recording_dir is set.""" if self.debug_recording_thread is not None: # Already started @@ -1068,7 +1092,7 @@ class PipelineRun: if debug_recording_dir := self.hass.data[DATA_CONFIG].get( CONF_DEBUG_RECORDING_DIR ): - if device_id is None: + if self._device_id is None: # // run_recording_dir = ( Path(debug_recording_dir) @@ -1079,7 +1103,7 @@ class PipelineRun: # /// run_recording_dir = ( Path(debug_recording_dir) - / device_id + / self._device_id / self.pipeline.name / str(time.monotonic_ns()) ) @@ -1100,8 +1124,8 @@ class PipelineRun: # Not running return - # Signal thread to stop gracefully - self.debug_recording_queue.put(None) + # NOTE: Expecting a None to have been put in self.debug_recording_queue + # in self.end() to signal the thread to stop. 
# Wait until the thread has finished to ensure that files are fully written await self.hass.async_add_executor_job(self.debug_recording_thread.join) @@ -1290,9 +1314,9 @@ class PipelineInput: if stt_audio_buffer: # Send audio in the buffer first to speech-to-text, then move on to stt_stream. # This is basically an async itertools.chain. - async def buffer_then_audio_stream() -> AsyncGenerator[ - ProcessedAudioChunk, None - ]: + async def buffer_then_audio_stream() -> ( + AsyncGenerator[ProcessedAudioChunk, None] + ): # Buffered audio for chunk in stt_audio_buffer: yield chunk @@ -1451,7 +1475,7 @@ class PipelineStorageCollection( @callback def _get_suggested_id(self, info: dict) -> str: """Suggest an ID based on the config.""" - return ulid_util.ulid() + return ulid_util.ulid_now() async def _update_data(self, item: Pipeline, update_data: dict) -> Pipeline: """Return a new updated item.""" @@ -1632,6 +1656,20 @@ class PipelineRuns: pipeline_run.abort_wake_word_detection = True +@dataclass +class DeviceAudioQueue: + """Audio capture queue for a satellite device.""" + + queue: asyncio.Queue[bytes | None] + """Queue of audio chunks (None = stop signal)""" + + id: str = field(default_factory=ulid_util.ulid_now) + """Unique id to ensure the correct audio queue is cleaned up in websocket API.""" + + overflow: bool = False + """Flag to be set if audio samples were dropped because the queue was full.""" + + class PipelineData: """Store and debug data stored in hass.data.""" @@ -1641,6 +1679,7 @@ class PipelineData: self.pipeline_debug: dict[str, LimitedSizeDict[str, PipelineRunDebug]] = {} self.pipeline_devices: set[str] = set() self.pipeline_runs = PipelineRuns(pipeline_store) + self.device_audio_queues: dict[str, DeviceAudioQueue] = {} @dataclass diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py index fda3e266490..89cced519df 100644 --- 
a/homeassistant/components/assist_pipeline/websocket_api.py +++ b/homeassistant/components/assist_pipeline/websocket_api.py @@ -3,22 +3,31 @@ import asyncio # Suppressing disable=deprecated-module is needed for Python 3.11 import audioop # pylint: disable=deprecated-module +import base64 from collections.abc import AsyncGenerator, Callable +import contextlib import logging -from typing import Any +import math +from typing import Any, Final import voluptuous as vol from homeassistant.components import conversation, stt, tts, websocket_api -from homeassistant.const import MATCH_ALL +from homeassistant.const import ATTR_DEVICE_ID, ATTR_SECONDS, MATCH_ALL from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.util import language as language_util -from .const import DEFAULT_PIPELINE_TIMEOUT, DEFAULT_WAKE_WORD_TIMEOUT, DOMAIN +from .const import ( + DEFAULT_PIPELINE_TIMEOUT, + DEFAULT_WAKE_WORD_TIMEOUT, + DOMAIN, + EVENT_RECORDING, +) from .error import PipelineNotFound from .pipeline import ( AudioSettings, + DeviceAudioQueue, PipelineData, PipelineError, PipelineEvent, @@ -32,6 +41,11 @@ from .pipeline import ( _LOGGER = logging.getLogger(__name__) +CAPTURE_RATE: Final = 16000 +CAPTURE_WIDTH: Final = 2 +CAPTURE_CHANNELS: Final = 1 +MAX_CAPTURE_TIMEOUT: Final = 60.0 + @callback def async_register_websocket_api(hass: HomeAssistant) -> None: @@ -40,6 +54,7 @@ def async_register_websocket_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_list_languages) websocket_api.async_register_command(hass, websocket_list_runs) websocket_api.async_register_command(hass, websocket_get_run) + websocket_api.async_register_command(hass, websocket_device_capture) @websocket_api.websocket_command( @@ -371,3 +386,100 @@ async def websocket_list_languages( else pipeline_languages }, ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): 
"assist_pipeline/device/capture", + vol.Required("device_id"): str, + vol.Required("timeout"): vol.All( + # 0 < timeout <= MAX_CAPTURE_TIMEOUT + vol.Coerce(float), + vol.Range(min=0, min_included=False, max=MAX_CAPTURE_TIMEOUT), + ), + } +) +@websocket_api.async_response +async def websocket_device_capture( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Capture raw audio from a satellite device and forward to client.""" + pipeline_data: PipelineData = hass.data[DOMAIN] + device_id = msg["device_id"] + + # Number of seconds to record audio in wall clock time + timeout_seconds = msg["timeout"] + + # We don't know the chunk size, so the upper bound is calculated assuming a + # single sample (16 bits) per queue item. + max_queue_items = ( + # +1 for None to signal end + int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1 + ) + + audio_queue = DeviceAudioQueue(queue=asyncio.Queue(maxsize=max_queue_items)) + + # Running simultaneous captures for a single device will not work by design. + # The new capture will cause the old capture to stop. 
+ if ( + old_audio_queue := pipeline_data.device_audio_queues.pop(device_id, None) + ) is not None: + with contextlib.suppress(asyncio.QueueFull): + # Signal other websocket command that we're taking over + old_audio_queue.queue.put_nowait(None) + + # Only one client can be capturing audio at a time + pipeline_data.device_audio_queues[device_id] = audio_queue + + def clean_up_queue() -> None: + # Clean up our audio queue + maybe_audio_queue = pipeline_data.device_audio_queues.get(device_id) + if (maybe_audio_queue is not None) and (maybe_audio_queue.id == audio_queue.id): + # Only pop if this is our queue + pipeline_data.device_audio_queues.pop(device_id) + + # Unsubscribe cleans up queue + connection.subscriptions[msg["id"]] = clean_up_queue + + # Audio will follow as events + connection.send_result(msg["id"]) + + # Record to logbook + hass.bus.async_fire( + EVENT_RECORDING, + { + ATTR_DEVICE_ID: device_id, + ATTR_SECONDS: timeout_seconds, + }, + ) + + try: + with contextlib.suppress(asyncio.TimeoutError): + async with asyncio.timeout(timeout_seconds): + while True: + # Send audio chunks encoded as base64 + audio_bytes = await audio_queue.queue.get() + if audio_bytes is None: + # Signal to stop + break + + connection.send_event( + msg["id"], + { + "type": "audio", + "rate": CAPTURE_RATE, # hertz + "width": CAPTURE_WIDTH, # bytes + "channels": CAPTURE_CHANNELS, + "audio": base64.b64encode(audio_bytes).decode("ascii"), + }, + ) + + # Capture has ended + connection.send_event( + msg["id"], {"type": "end", "overflow": audio_queue.overflow} + ) + finally: + clean_up_queue() diff --git a/homeassistant/components/asuswrt/bridge.py b/homeassistant/components/asuswrt/bridge.py index bbde9271984..83f99ecc76a 100644 --- a/homeassistant/components/asuswrt/bridge.py +++ b/homeassistant/components/asuswrt/bridge.py @@ -9,6 +9,8 @@ import logging from typing import Any, TypeVar, cast from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy +from aiohttp import ClientSession +from 
pyasuswrt import AsusWrtError, AsusWrtHttp from homeassistant.const import ( CONF_HOST, @@ -19,6 +21,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.update_coordinator import UpdateFailed @@ -31,6 +34,8 @@ from .const import ( DEFAULT_INTERFACE, KEY_METHOD, KEY_SENSORS, + PROTOCOL_HTTP, + PROTOCOL_HTTPS, PROTOCOL_TELNET, SENSORS_BYTES, SENSORS_LOAD_AVG, @@ -74,6 +79,8 @@ def handle_errors_and_zip( raise UpdateFailed("Received invalid data type") return data + if isinstance(data, dict): + return dict(zip(keys, list(data.values()))) if not isinstance(data, list): raise UpdateFailed("Received invalid data type") return dict(zip(keys, data)) @@ -91,6 +98,9 @@ class AsusWrtBridge(ABC): hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None ) -> AsusWrtBridge: """Get Bridge instance.""" + if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP): + session = async_get_clientsession(hass) + return AsusWrtHttpBridge(conf, session) return AsusWrtLegacyBridge(conf, options) def __init__(self, host: str) -> None: @@ -286,3 +296,116 @@ class AsusWrtLegacyBridge(AsusWrtBridge): async def _get_temperatures(self) -> Any: """Fetch temperatures information from the router.""" return await self._api.async_get_temperature() + + +class AsusWrtHttpBridge(AsusWrtBridge): + """The Bridge that use HTTP library.""" + + def __init__(self, conf: dict[str, Any], session: ClientSession) -> None: + """Initialize Bridge that use HTTP library.""" + super().__init__(conf[CONF_HOST]) + self._api: AsusWrtHttp = self._get_api(conf, session) + + @staticmethod + def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp: + """Get the AsusWrtHttp API.""" + return AsusWrtHttp( + conf[CONF_HOST], + conf[CONF_USERNAME], + conf.get(CONF_PASSWORD, ""), + 
use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS, + port=conf.get(CONF_PORT), + session=session, + ) + + @property + def is_connected(self) -> bool: + """Get connected status.""" + return cast(bool, self._api.is_connected) + + async def async_connect(self) -> None: + """Connect to the device.""" + await self._api.async_connect() + + # get main router properties + if mac := self._api.mac: + self._label_mac = format_mac(mac) + self._firmware = self._api.firmware + self._model = self._api.model + + async def async_disconnect(self) -> None: + """Disconnect to the device.""" + await self._api.async_disconnect() + + async def async_get_connected_devices(self) -> dict[str, WrtDevice]: + """Get list of connected devices.""" + try: + api_devices = await self._api.async_get_connected_devices() + except AsusWrtError as exc: + raise UpdateFailed(exc) from exc + return { + format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node) + for mac, dev in api_devices.items() + } + + async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: + """Return a dictionary of available sensors for this bridge.""" + sensors_temperatures = await self._get_available_temperature_sensors() + sensors_types = { + SENSORS_TYPE_BYTES: { + KEY_SENSORS: SENSORS_BYTES, + KEY_METHOD: self._get_bytes, + }, + SENSORS_TYPE_LOAD_AVG: { + KEY_SENSORS: SENSORS_LOAD_AVG, + KEY_METHOD: self._get_load_avg, + }, + SENSORS_TYPE_RATES: { + KEY_SENSORS: SENSORS_RATES, + KEY_METHOD: self._get_rates, + }, + SENSORS_TYPE_TEMPERATURES: { + KEY_SENSORS: sensors_temperatures, + KEY_METHOD: self._get_temperatures, + }, + } + return sensors_types + + async def _get_available_temperature_sensors(self) -> list[str]: + """Check which temperature information is available on the router.""" + try: + available_temps = await self._api.async_get_temperatures() + available_sensors = [ + t for t in SENSORS_TEMPERATURES if t in available_temps + ] + except AsusWrtError as exc: + _LOGGER.warning( + ( + "Failed checking 
temperature sensor availability for ASUS router" + " %s. Exception: %s" + ), + self.host, + exc, + ) + return [] + return available_sensors + + @handle_errors_and_zip(AsusWrtError, SENSORS_BYTES) + async def _get_bytes(self) -> Any: + """Fetch byte information from the router.""" + return await self._api.async_get_traffic_bytes() + + @handle_errors_and_zip(AsusWrtError, SENSORS_RATES) + async def _get_rates(self) -> Any: + """Fetch rates information from the router.""" + return await self._api.async_get_traffic_rates() + + @handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG) + async def _get_load_avg(self) -> Any: + """Fetch cpu load avg information from the router.""" + return await self._api.async_get_loadavg() + + @handle_errors_and_zip(AsusWrtError, None) + async def _get_temperatures(self) -> Any: + """Fetch temperatures information from the router.""" + return await self._api.async_get_temperatures() diff --git a/homeassistant/components/asuswrt/config_flow.py b/homeassistant/components/asuswrt/config_flow.py index 56569d4f23b..047e9b549d8 100644 --- a/homeassistant/components/asuswrt/config_flow.py +++ b/homeassistant/components/asuswrt/config_flow.py @@ -7,6 +7,7 @@ import os import socket from typing import Any, cast +from pyasuswrt import AsusWrtError import voluptuous as vol from homeassistant.components.device_tracker import ( @@ -15,6 +16,7 @@ from homeassistant.components.device_tracker import ( ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( + CONF_BASE, CONF_HOST, CONF_MODE, CONF_PASSWORD, @@ -30,6 +32,7 @@ from homeassistant.helpers.schema_config_entry_flow import ( SchemaFlowFormStep, SchemaOptionsFlowHandler, ) +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from .bridge import AsusWrtBridge from .const import ( @@ -44,11 +47,21 @@ from .const import ( DOMAIN, MODE_AP, MODE_ROUTER, + PROTOCOL_HTTP, + PROTOCOL_HTTPS, PROTOCOL_SSH, PROTOCOL_TELNET, ) -LABEL_MAC 
= "LABEL_MAC" +ALLOWED_PROTOCOL = [ + PROTOCOL_HTTPS, + PROTOCOL_SSH, + PROTOCOL_HTTP, + PROTOCOL_TELNET, +] + +PASS_KEY = "pass_key" +PASS_KEY_MSG = "Only provide password or SSH key file" RESULT_CONN_ERROR = "cannot_connect" RESULT_SUCCESS = "success" @@ -56,14 +69,20 @@ RESULT_UNKNOWN = "unknown" _LOGGER = logging.getLogger(__name__) +LEGACY_SCHEMA = vol.Schema( + { + vol.Required(CONF_MODE, default=MODE_ROUTER): vol.In( + {MODE_ROUTER: "Router", MODE_AP: "Access Point"} + ), + } +) + OPTIONS_SCHEMA = vol.Schema( { vol.Optional( CONF_CONSIDER_HOME, default=DEFAULT_CONSIDER_HOME.total_seconds() ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)), vol.Optional(CONF_TRACK_UNKNOWN, default=DEFAULT_TRACK_UNKNOWN): bool, - vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): str, - vol.Required(CONF_DNSMASQ, default=DEFAULT_DNSMASQ): str, } ) @@ -72,12 +91,22 @@ async def get_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: """Get options schema.""" options_flow: SchemaOptionsFlowHandler options_flow = cast(SchemaOptionsFlowHandler, handler.parent_handler) - if options_flow.config_entry.data[CONF_MODE] == MODE_AP: - return OPTIONS_SCHEMA.extend( + used_protocol = options_flow.config_entry.data[CONF_PROTOCOL] + if used_protocol in [PROTOCOL_SSH, PROTOCOL_TELNET]: + data_schema = OPTIONS_SCHEMA.extend( { - vol.Optional(CONF_REQUIRE_IP, default=True): bool, + vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): str, + vol.Required(CONF_DNSMASQ, default=DEFAULT_DNSMASQ): str, } ) + if options_flow.config_entry.data[CONF_MODE] == MODE_AP: + return data_schema.extend( + { + vol.Optional(CONF_REQUIRE_IP, default=True): bool, + } + ) + return data_schema + return OPTIONS_SCHEMA @@ -101,45 +130,47 @@ def _get_ip(host: str) -> str | None: class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN): - """Handle a config flow.""" + """Handle a config flow for AsusWRT.""" VERSION = 1 + def __init__(self) -> None: + """Initialize the AsusWrt config flow.""" + 
self._config_data: dict[str, Any] = {} + @callback - def _show_setup_form( - self, - user_input: dict[str, Any] | None = None, - errors: dict[str, str] | None = None, - ) -> FlowResult: + def _show_setup_form(self, error: str | None = None) -> FlowResult: """Show the setup form to the user.""" - if user_input is None: - user_input = {} + user_input = self._config_data - adv_schema = {} - conf_password = vol.Required(CONF_PASSWORD) if self.show_advanced_options: - conf_password = vol.Optional(CONF_PASSWORD) - adv_schema[vol.Optional(CONF_PORT)] = cv.port - adv_schema[vol.Optional(CONF_SSH_KEY)] = str + add_schema = { + vol.Exclusive(CONF_PASSWORD, PASS_KEY, PASS_KEY_MSG): str, + vol.Optional(CONF_PORT): cv.port, + vol.Exclusive(CONF_SSH_KEY, PASS_KEY, PASS_KEY_MSG): str, + } + else: + add_schema = {vol.Required(CONF_PASSWORD): str} schema = { vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str, vol.Required(CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")): str, - conf_password: str, - vol.Required(CONF_PROTOCOL, default=PROTOCOL_SSH): vol.In( - {PROTOCOL_SSH: "SSH", PROTOCOL_TELNET: "Telnet"} - ), - **adv_schema, - vol.Required(CONF_MODE, default=MODE_ROUTER): vol.In( - {MODE_ROUTER: "Router", MODE_AP: "Access Point"} + **add_schema, + vol.Required( + CONF_PROTOCOL, + default=user_input.get(CONF_PROTOCOL, PROTOCOL_HTTPS), + ): SelectSelector( + SelectSelectorConfig( + options=ALLOWED_PROTOCOL, translation_key="protocols" + ) ), } return self.async_show_form( step_id="user", data_schema=vol.Schema(schema), - errors=errors or {}, + errors={CONF_BASE: error} if error else None, ) async def _async_check_connection( @@ -147,25 +178,49 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN): ) -> tuple[str, str | None]: """Attempt to connect the AsusWrt router.""" + api: AsusWrtBridge host: str = user_input[CONF_HOST] - api = AsusWrtBridge.get_bridge(self.hass, user_input) + protocol = user_input[CONF_PROTOCOL] + error: str | None = None + + conf = 
{**user_input, CONF_MODE: MODE_ROUTER} + api = AsusWrtBridge.get_bridge(self.hass, conf) try: await api.async_connect() - except OSError: - _LOGGER.error("Error connecting to the AsusWrt router at %s", host) - return RESULT_CONN_ERROR, None + except (AsusWrtError, OSError): + _LOGGER.error( + "Error connecting to the AsusWrt router at %s using protocol %s", + host, + protocol, + ) + error = RESULT_CONN_ERROR except Exception: # pylint: disable=broad-except _LOGGER.exception( - "Unknown error connecting with AsusWrt router at %s", host + "Unknown error connecting with AsusWrt router at %s using protocol %s", + host, + protocol, ) - return RESULT_UNKNOWN, None + error = RESULT_UNKNOWN - if not api.is_connected: - _LOGGER.error("Error connecting to the AsusWrt router at %s", host) - return RESULT_CONN_ERROR, None + if error is None: + if not api.is_connected: + _LOGGER.error( + "Error connecting to the AsusWrt router at %s using protocol %s", + host, + protocol, + ) + error = RESULT_CONN_ERROR + if error is not None: + return error, None + + _LOGGER.info( + "Successfully connected to the AsusWrt router at %s using protocol %s", + host, + protocol, + ) unique_id = api.label_mac await api.async_disconnect() @@ -182,51 +237,59 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="no_unique_id") if user_input is None: - return self._show_setup_form(user_input) - - errors: dict[str, str] = {} - host: str = user_input[CONF_HOST] + return self._show_setup_form() + self._config_data = user_input pwd: str | None = user_input.get(CONF_PASSWORD) ssh: str | None = user_input.get(CONF_SSH_KEY) + protocol: str = user_input[CONF_PROTOCOL] + if not pwd and protocol != PROTOCOL_SSH: + return self._show_setup_form(error="pwd_required") if not (pwd or ssh): - errors["base"] = "pwd_or_ssh" - elif ssh: - if pwd: - errors["base"] = "pwd_and_ssh" + return self._show_setup_form(error="pwd_or_ssh") + if ssh and not await 
self.hass.async_add_executor_job(_is_file, ssh): + return self._show_setup_form(error="ssh_not_file") + + host: str = user_input[CONF_HOST] + if not await self.hass.async_add_executor_job(_get_ip, host): + return self._show_setup_form(error="invalid_host") + + result, unique_id = await self._async_check_connection(user_input) + if result == RESULT_SUCCESS: + if unique_id: + await self.async_set_unique_id(unique_id) + # we allow to configure a single instance without unique id + elif self._async_current_entries(): + return self.async_abort(reason="invalid_unique_id") else: - isfile = await self.hass.async_add_executor_job(_is_file, ssh) - if not isfile: - errors["base"] = "ssh_not_file" - - if not errors: - ip_address = await self.hass.async_add_executor_job(_get_ip, host) - if not ip_address: - errors["base"] = "invalid_host" - - if not errors: - result, unique_id = await self._async_check_connection(user_input) - if result == RESULT_SUCCESS: - if unique_id: - await self.async_set_unique_id(unique_id) - # we allow configure a single instance without unique id - elif self._async_current_entries(): - return self.async_abort(reason="invalid_unique_id") - else: - _LOGGER.warning( - "This device does not provide a valid Unique ID." - " Configuration of multiple instance will not be possible" - ) - - return self.async_create_entry( - title=host, - data=user_input, + _LOGGER.warning( + "This device does not provide a valid Unique ID." 
+ " Configuration of multiple instance will not be possible" ) - errors["base"] = result + if protocol in [PROTOCOL_SSH, PROTOCOL_TELNET]: + return await self.async_step_legacy() + return await self._async_save_entry() - return self._show_setup_form(user_input, errors) + return self._show_setup_form(error=result) + + async def async_step_legacy( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle a flow for legacy settings.""" + if user_input is None: + return self.async_show_form(step_id="legacy", data_schema=LEGACY_SCHEMA) + + self._config_data.update(user_input) + return await self._async_save_entry() + + async def _async_save_entry(self) -> FlowResult: + """Save entry data if unique id is valid.""" + return self.async_create_entry( + title=self._config_data[CONF_HOST], + data=self._config_data, + ) @staticmethod @callback diff --git a/homeassistant/components/asuswrt/const.py b/homeassistant/components/asuswrt/const.py index 1733d4c09c3..a4cd6cde94c 100644 --- a/homeassistant/components/asuswrt/const.py +++ b/homeassistant/components/asuswrt/const.py @@ -20,6 +20,8 @@ KEY_SENSORS = "sensors" MODE_AP = "ap" MODE_ROUTER = "router" +PROTOCOL_HTTP = "http" +PROTOCOL_HTTPS = "https" PROTOCOL_SSH = "ssh" PROTOCOL_TELNET = "telnet" diff --git a/homeassistant/components/asuswrt/manifest.json b/homeassistant/components/asuswrt/manifest.json index 39f88fb96fe..9ed09cee67f 100644 --- a/homeassistant/components/asuswrt/manifest.json +++ b/homeassistant/components/asuswrt/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aioasuswrt", "asyncssh"], - "requirements": ["aioasuswrt==1.4.0"] + "requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.20"] } diff --git a/homeassistant/components/asuswrt/router.py b/homeassistant/components/asuswrt/router.py index c6fe651d292..927eef572f7 100644 --- a/homeassistant/components/asuswrt/router.py +++ b/homeassistant/components/asuswrt/router.py @@ -6,6 +6,8 @@ 
from datetime import datetime, timedelta import logging from typing import Any +from pyasuswrt import AsusWrtError + from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, @@ -219,7 +221,7 @@ class AsusWrtRouter: """Set up a AsusWrt router.""" try: await self._api.async_connect() - except OSError as exc: + except (AsusWrtError, OSError) as exc: raise ConfigEntryNotReady from exc if not self._api.is_connected: raise ConfigEntryNotReady diff --git a/homeassistant/components/asuswrt/strings.json b/homeassistant/components/asuswrt/strings.json index 52b9f919434..8a3207ec7cb 100644 --- a/homeassistant/components/asuswrt/strings.json +++ b/homeassistant/components/asuswrt/strings.json @@ -2,25 +2,31 @@ "config": { "step": { "user": { - "title": "AsusWRT", "description": "Set required parameter to connect to your router", "data": { "host": "[%key:common::config_flow::data::host%]", - "name": "[%key:common::config_flow::data::name%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "ssh_key": "Path to your SSH key file (instead of password)", "protocol": "Communication protocol to use", - "port": "Port (leave empty for protocol default)", - "mode": "[%key:common::config_flow::data::mode%]" + "port": "Port (leave empty for protocol default)" + }, + "data_description": { + "host": "The hostname or IP address of your ASUSWRT router." 
+ } + }, + "legacy": { + "description": "Set required parameters to connect to your router", + "data": { + "mode": "Router operating mode" } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_host": "[%key:common::config_flow::error::invalid_host%]", - "pwd_and_ssh": "Only provide password or SSH key file", "pwd_or_ssh": "Please provide password or SSH key file", + "pwd_required": "Password is required for selected protocol", "ssh_not_file": "SSH key file not found", "unknown": "[%key:common::config_flow::error::unknown%]" }, @@ -32,7 +38,6 @@ "options": { "step": { "init": { - "title": "AsusWRT Options", "data": { "consider_home": "Seconds to wait before considering a device away", "track_unknown": "Track unknown / unnamed devices", @@ -79,5 +84,15 @@ "name": "CPU Temperature" } } + }, + "selector": { + "protocols": { + "options": { + "https": "HTTPS", + "http": "HTTP", + "ssh": "SSH", + "telnet": "Telnet" + } + } } } diff --git a/homeassistant/components/atag/strings.json b/homeassistant/components/atag/strings.json index 39ed972524d..82070c0209f 100644 --- a/homeassistant/components/atag/strings.json +++ b/homeassistant/components/atag/strings.json @@ -2,10 +2,13 @@ "config": { "step": { "user": { - "title": "Connect to the device", + "description": "Connect to the device", "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of the Atag device." 
} } }, diff --git a/homeassistant/components/aurora_abb_powerone/__init__.py b/homeassistant/components/aurora_abb_powerone/__init__.py index b5dc236dfa2..43e3bd2ad5c 100644 --- a/homeassistant/components/aurora_abb_powerone/__init__.py +++ b/homeassistant/components/aurora_abb_powerone/__init__.py @@ -12,13 +12,14 @@ import logging -from aurorapy.client import AuroraSerialClient +from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN +from .const import DOMAIN, SCAN_INTERVAL PLATFORMS = [Platform.SENSOR] @@ -30,8 +31,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: comport = entry.data[CONF_PORT] address = entry.data[CONF_ADDRESS] - ser_client = AuroraSerialClient(address, comport, parity="N", timeout=1) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ser_client + coordinator = AuroraAbbDataUpdateCoordinator(hass, comport, address) + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -47,3 +50,58 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok + + +class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]): + """Class to manage fetching AuroraAbbPowerone data.""" + + def __init__(self, hass: HomeAssistant, comport: str, address: int) -> None: + """Initialize the data update coordinator.""" + self.available_prev = False + self.available = False + self.client = AuroraSerialClient(address, comport, parity="N", timeout=1) + super().__init__(hass, _LOGGER, name=DOMAIN, 
update_interval=SCAN_INTERVAL) + + def _update_data(self) -> dict[str, float]: + """Fetch new state data for the sensor. + + This is the only function that should fetch new data for Home Assistant. + """ + data: dict[str, float] = {} + self.available_prev = self.available + try: + self.client.connect() + + # read ADC channel 3 (grid power output) + power_watts = self.client.measure(3, True) + temperature_c = self.client.measure(21) + energy_wh = self.client.cumulated_energy(5) + except AuroraTimeoutError: + self.available = False + _LOGGER.debug("No response from inverter (could be dark)") + except AuroraError as error: + self.available = False + raise error + else: + data["instantaneouspower"] = round(power_watts, 1) + data["temp"] = round(temperature_c, 1) + data["totalenergy"] = round(energy_wh / 1000, 2) + self.available = True + + finally: + if self.available != self.available_prev: + if self.available: + _LOGGER.info("Communication with %s back online", self.name) + else: + _LOGGER.warning( + "Communication with %s lost", + self.name, + ) + if self.client.serline.isOpen(): + self.client.close() + + return data + + async def _async_update_data(self) -> dict[str, float]: + """Update inverter data in the executor.""" + return await self.hass.async_add_executor_job(self._update_data) diff --git a/homeassistant/components/aurora_abb_powerone/aurora_device.py b/homeassistant/components/aurora_abb_powerone/aurora_device.py deleted file mode 100644 index e9ca9e47121..00000000000 --- a/homeassistant/components/aurora_abb_powerone/aurora_device.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Top level class for AuroraABBPowerOneSolarPV inverters and sensors.""" -from __future__ import annotations - -from collections.abc import Mapping -import logging -from typing import Any - -from aurorapy.client import AuroraSerialClient - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import Entity - -from .const import ( - ATTR_DEVICE_NAME, - 
ATTR_FIRMWARE, - ATTR_MODEL, - ATTR_SERIAL_NUMBER, - DEFAULT_DEVICE_NAME, - DOMAIN, - MANUFACTURER, -) - -_LOGGER = logging.getLogger(__name__) - - -class AuroraEntity(Entity): - """Representation of an Aurora ABB PowerOne device.""" - - def __init__(self, client: AuroraSerialClient, data: Mapping[str, Any]) -> None: - """Initialise the basic device.""" - self._data = data - self.type = "device" - self.client = client - self._available = True - - @property - def unique_id(self) -> str | None: - """Return the unique id for this device.""" - if (serial := self._data.get(ATTR_SERIAL_NUMBER)) is None: - return None - return f"{serial}_{self.entity_description.key}" - - @property - def available(self) -> bool: - """Return True if entity is available.""" - return self._available - - @property - def device_info(self) -> DeviceInfo: - """Return device specific attributes.""" - return DeviceInfo( - identifiers={(DOMAIN, self._data[ATTR_SERIAL_NUMBER])}, - manufacturer=MANUFACTURER, - model=self._data[ATTR_MODEL], - name=self._data.get(ATTR_DEVICE_NAME, DEFAULT_DEVICE_NAME), - sw_version=self._data[ATTR_FIRMWARE], - ) diff --git a/homeassistant/components/aurora_abb_powerone/const.py b/homeassistant/components/aurora_abb_powerone/const.py index 3711dd6d800..d1266a838c3 100644 --- a/homeassistant/components/aurora_abb_powerone/const.py +++ b/homeassistant/components/aurora_abb_powerone/const.py @@ -1,5 +1,7 @@ """Constants for the Aurora ABB PowerOne integration.""" +from datetime import timedelta + DOMAIN = "aurora_abb_powerone" # Min max addresses and default according to here: @@ -8,6 +10,7 @@ DOMAIN = "aurora_abb_powerone" MIN_ADDRESS = 2 MAX_ADDRESS = 63 DEFAULT_ADDRESS = 2 +SCAN_INTERVAL = timedelta(seconds=30) DEFAULT_INTEGRATION_TITLE = "PhotoVoltaic Inverters" DEFAULT_DEVICE_NAME = "Solar Inverter" diff --git a/homeassistant/components/aurora_abb_powerone/sensor.py b/homeassistant/components/aurora_abb_powerone/sensor.py index 55f3be5d6db..0e7d0c06a4e 100644 --- 
a/homeassistant/components/aurora_abb_powerone/sensor.py +++ b/homeassistant/components/aurora_abb_powerone/sensor.py @@ -5,8 +5,6 @@ from collections.abc import Mapping import logging from typing import Any -from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError - from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -21,10 +19,21 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .aurora_device import AuroraEntity -from .const import DOMAIN +from . import AuroraAbbDataUpdateCoordinator +from .const import ( + ATTR_DEVICE_NAME, + ATTR_FIRMWARE, + ATTR_MODEL, + ATTR_SERIAL_NUMBER, + DEFAULT_DEVICE_NAME, + DOMAIN, + MANUFACTURER, +) _LOGGER = logging.getLogger(__name__) @@ -61,70 +70,40 @@ async def async_setup_entry( """Set up aurora_abb_powerone sensor based on a config entry.""" entities = [] - client = hass.data[DOMAIN][config_entry.entry_id] + coordinator = hass.data[DOMAIN][config_entry.entry_id] data = config_entry.data for sens in SENSOR_TYPES: - entities.append(AuroraSensor(client, data, sens)) + entities.append(AuroraSensor(coordinator, data, sens)) _LOGGER.debug("async_setup_entry adding %d entities", len(entities)) async_add_entities(entities, True) -class AuroraSensor(AuroraEntity, SensorEntity): - """Representation of a Sensor on a Aurora ABB PowerOne Solar inverter.""" +class AuroraSensor(CoordinatorEntity[AuroraAbbDataUpdateCoordinator], SensorEntity): + """Representation of a Sensor on an Aurora ABB PowerOne Solar inverter.""" _attr_has_entity_name = True def __init__( self, - client: AuroraSerialClient, + coordinator: AuroraAbbDataUpdateCoordinator, data: Mapping[str, Any], 
entity_description: SensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__(client, data) + super().__init__(coordinator) self.entity_description = entity_description - self.available_prev = True + self._attr_unique_id = f"{data[ATTR_SERIAL_NUMBER]}_{entity_description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, data[ATTR_SERIAL_NUMBER])}, + manufacturer=MANUFACTURER, + model=data[ATTR_MODEL], + name=data.get(ATTR_DEVICE_NAME, DEFAULT_DEVICE_NAME), + sw_version=data[ATTR_FIRMWARE], + ) - def update(self) -> None: - """Fetch new state data for the sensor. - - This is the only method that should fetch new data for Home Assistant. - """ - try: - self.available_prev = self._attr_available - self.client.connect() - if self.entity_description.key == "instantaneouspower": - # read ADC channel 3 (grid power output) - power_watts = self.client.measure(3, True) - self._attr_native_value = round(power_watts, 1) - elif self.entity_description.key == "temp": - temperature_c = self.client.measure(21) - self._attr_native_value = round(temperature_c, 1) - elif self.entity_description.key == "totalenergy": - energy_wh = self.client.cumulated_energy(5) - self._attr_native_value = round(energy_wh / 1000, 2) - self._attr_available = True - - except AuroraTimeoutError: - self._attr_state = None - self._attr_native_value = None - self._attr_available = False - _LOGGER.debug("No response from inverter (could be dark)") - except AuroraError as error: - self._attr_state = None - self._attr_native_value = None - self._attr_available = False - raise error - finally: - if self._attr_available != self.available_prev: - if self._attr_available: - _LOGGER.info("Communication with %s back online", self.name) - else: - _LOGGER.warning( - "Communication with %s lost", - self.name, - ) - if self.client.serline.isOpen(): - self.client.close() + @property + def native_value(self) -> StateType: + """Get the value of the sensor from previously 
collected data.""" + return self.coordinator.data.get(self.entity_description.key) diff --git a/homeassistant/components/auth/login_flow.py b/homeassistant/components/auth/login_flow.py index e0cc0eeb1ec..96255f59c7b 100644 --- a/homeassistant/components/auth/login_flow.py +++ b/homeassistant/components/auth/login_flow.py @@ -71,14 +71,14 @@ from __future__ import annotations from collections.abc import Callable from http import HTTPStatus from ipaddress import ip_address -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast from aiohttp import web import voluptuous as vol import voluptuous_serialize from homeassistant import data_entry_flow -from homeassistant.auth import AuthManagerFlowManager +from homeassistant.auth import AuthManagerFlowManager, InvalidAuthError from homeassistant.auth.models import Credentials from homeassistant.components import onboarding from homeassistant.components.http.auth import async_user_not_allowed_do_auth @@ -90,10 +90,16 @@ from homeassistant.components.http.ban import ( from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.http.view import HomeAssistantView from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import is_cloud_connection +from homeassistant.util.network import is_local from . import indieauth if TYPE_CHECKING: + from homeassistant.auth.providers.trusted_networks import ( + TrustedNetworksAuthProvider, + ) + from . 
import StoreResultType @@ -146,12 +152,61 @@ class AuthProvidersView(HomeAssistantView): message_code="onboarding_required", ) - return self.json( - [ - {"name": provider.name, "id": provider.id, "type": provider.type} - for provider in hass.auth.auth_providers - ] - ) + try: + remote_address = ip_address(request.remote) # type: ignore[arg-type] + except ValueError: + return self.json_message( + message="Invalid remote IP", + status_code=HTTPStatus.BAD_REQUEST, + message_code="invalid_remote_ip", + ) + + cloud_connection = is_cloud_connection(hass) + + providers = [] + for provider in hass.auth.auth_providers: + additional_data = {} + + if provider.type == "trusted_networks": + if cloud_connection: + # Skip quickly as trusted networks are not available on cloud + continue + + try: + cast("TrustedNetworksAuthProvider", provider).async_validate_access( + remote_address + ) + except InvalidAuthError: + # Not a trusted network, so we don't expose that trusted_network authenticator is setup + continue + elif ( + provider.type == "homeassistant" + and not cloud_connection + and is_local(remote_address) + and "person" in hass.config.components + ): + # We are local, return user id and username + users = await provider.store.async_get_users() + additional_data["users"] = { + user.id: credentials.data["username"] + for user in users + for credentials in user.credentials + if ( + credentials.auth_provider_type == provider.type + and credentials.auth_provider_id == provider.id + ) + } + + providers.append( + { + "name": provider.name, + "id": provider.id, + "type": provider.type, + **additional_data, + } + ) + + return self.json(providers) def _prepare_result_json( diff --git a/homeassistant/components/auth/strings.json b/homeassistant/components/auth/strings.json index d386bb7a488..0dd3ee64cdf 100644 --- a/homeassistant/components/auth/strings.json +++ b/homeassistant/components/auth/strings.json @@ -31,5 +31,11 @@ "invalid_code": "Invalid code, please try again." 
} } + }, + "issues": { + "deprecated_legacy_api_password": { + "title": "The legacy API password is deprecated", + "description": "The legacy API password authentication provider is deprecated and will be removed. Please remove it from your YAML configuration and use the default Home Assistant authentication provider instead." + } } } diff --git a/homeassistant/components/automation/helpers.py b/homeassistant/components/automation/helpers.py index 7c2efc17bf4..a7c329a544a 100644 --- a/homeassistant/components/automation/helpers.py +++ b/homeassistant/components/automation/helpers.py @@ -1,5 +1,6 @@ """Helpers for automation integration.""" from homeassistant.components import blueprint +from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.singleton import singleton @@ -15,8 +16,17 @@ def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool: return len(automations_with_blueprint(hass, blueprint_path)) > 0 +async def _reload_blueprint_automations( + hass: HomeAssistant, blueprint_path: str +) -> None: + """Reload all automations that rely on a specific blueprint.""" + await hass.services.async_call(DOMAIN, SERVICE_RELOAD) + + @singleton(DATA_BLUEPRINTS) @callback def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints: """Get automation blueprints.""" - return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER, _blueprint_in_use) + return blueprint.DomainBlueprints( + hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_automations + ) diff --git a/homeassistant/components/axis/strings.json b/homeassistant/components/axis/strings.json index 47a25b542a7..8c302dba201 100644 --- a/homeassistant/components/axis/strings.json +++ b/homeassistant/components/axis/strings.json @@ -3,12 +3,16 @@ "flow_title": "{name} ({host})", "step": { "user": { - "title": "Set up Axis device", + "description": "Set up an Axis device", "data": { "host": 
"[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of the Axis device.", + "username": "The user name you set up on your Axis device. It is recommended to create a user specifically for Home Assistant." } } }, diff --git a/homeassistant/components/baf/fan.py b/homeassistant/components/baf/fan.py index 059603fc589..e2d1c5fcb3a 100644 --- a/homeassistant/components/baf/fan.py +++ b/homeassistant/components/baf/fan.py @@ -93,8 +93,6 @@ class BAFFan(BAFEntity, FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode != PRESET_MODE_AUTO: - raise ValueError(f"Invalid preset mode: {preset_mode}") self._device.fan_mode = OffOnAuto.AUTO async def async_set_direction(self, direction: str) -> None: diff --git a/homeassistant/components/balboa/binary_sensor.py b/homeassistant/components/balboa/binary_sensor.py index 9f363746a8f..7462d051643 100644 --- a/homeassistant/components/balboa/binary_sensor.py +++ b/homeassistant/components/balboa/binary_sensor.py @@ -47,31 +47,27 @@ class BalboaBinarySensorEntityDescription( ): """A class that describes Balboa binary sensor entities.""" - # BalboaBinarySensorEntity does not support UNDEFINED or None, - # restrict the type to str. 
- name: str = "" - FILTER_CYCLE_ICONS = ("mdi:sync", "mdi:sync-off") BINARY_SENSOR_DESCRIPTIONS = ( BalboaBinarySensorEntityDescription( - key="filter_cycle_1", - name="Filter1", + key="Filter1", + translation_key="filter_1", device_class=BinarySensorDeviceClass.RUNNING, is_on_fn=lambda spa: spa.filter_cycle_1_running, on_off_icons=FILTER_CYCLE_ICONS, ), BalboaBinarySensorEntityDescription( - key="filter_cycle_2", - name="Filter2", + key="Filter2", + translation_key="filter_2", device_class=BinarySensorDeviceClass.RUNNING, is_on_fn=lambda spa: spa.filter_cycle_2_running, on_off_icons=FILTER_CYCLE_ICONS, ), ) CIRCULATION_PUMP_DESCRIPTION = BalboaBinarySensorEntityDescription( - key="circulation_pump", - name="Circ Pump", + key="Circ Pump", + translation_key="circ_pump", device_class=BinarySensorDeviceClass.RUNNING, is_on_fn=lambda spa: (pump := spa.circulation_pump) is not None and pump.state > 0, on_off_icons=("mdi:pump", "mdi:pump-off"), @@ -87,7 +83,7 @@ class BalboaBinarySensorEntity(BalboaEntity, BinarySensorEntity): self, spa: SpaClient, description: BalboaBinarySensorEntityDescription ) -> None: """Initialize a Balboa binary sensor entity.""" - super().__init__(spa, description.name) + super().__init__(spa, description.key) self.entity_description = description @property diff --git a/homeassistant/components/balboa/climate.py b/homeassistant/components/balboa/climate.py index 0d0fa9bd179..d213a8fd2e8 100644 --- a/homeassistant/components/balboa/climate.py +++ b/homeassistant/components/balboa/climate.py @@ -59,6 +59,7 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity): ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_translation_key = DOMAIN + _attr_name = None def __init__(self, client: SpaClient) -> None: """Initialize the climate entity.""" diff --git a/homeassistant/components/balboa/entity.py b/homeassistant/components/balboa/entity.py index 3b4f7d08fff..e02579658da 100644 --- 
a/homeassistant/components/balboa/entity.py +++ b/homeassistant/components/balboa/entity.py @@ -15,12 +15,11 @@ class BalboaEntity(Entity): _attr_should_poll = False _attr_has_entity_name = True - def __init__(self, client: SpaClient, name: str | None = None) -> None: + def __init__(self, client: SpaClient, key: str) -> None: """Initialize the control.""" mac = client.mac_address model = client.model - self._attr_unique_id = f'{model}-{name}-{mac.replace(":","")[-6:]}' - self._attr_name = name + self._attr_unique_id = f'{model}-{key}-{mac.replace(":","")[-6:]}' self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, mac)}, name=model, diff --git a/homeassistant/components/balboa/strings.json b/homeassistant/components/balboa/strings.json index 214ccf8fbe1..101436c0f31 100644 --- a/homeassistant/components/balboa/strings.json +++ b/homeassistant/components/balboa/strings.json @@ -2,9 +2,12 @@ "config": { "step": { "user": { - "title": "Connect to the Balboa Wi-Fi device", + "description": "Connect to the Balboa Wi-Fi device", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of your Balboa Spa Wifi Device. For example, 192.168.1.58." 
} } }, @@ -26,6 +29,17 @@ } }, "entity": { + "binary_sensor": { + "filter_1": { + "name": "Filter cycle 1" + }, + "filter_2": { + "name": "Filter cycle 2" + }, + "circ_pump": { + "name": "Circulation pump" + } + }, "climate": { "balboa": { "state_attributes": { diff --git a/homeassistant/components/blebox/config_flow.py b/homeassistant/components/blebox/config_flow.py index 31d1f6162d7..977e704eb98 100644 --- a/homeassistant/components/blebox/config_flow.py +++ b/homeassistant/components/blebox/config_flow.py @@ -112,7 +112,7 @@ class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self.device_config["name"] = product.name # Check if configured but IP changed since await self.async_set_unique_id(product.unique_id) - self._abort_if_unique_id_configured() + self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) self.context.update( { "title_placeholders": { diff --git a/homeassistant/components/blink/__init__.py b/homeassistant/components/blink/__init__.py index c6413dd4372..d83c2686563 100644 --- a/homeassistant/components/blink/__init__.py +++ b/homeassistant/components/blink/__init__.py @@ -21,17 +21,11 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType -from .const import ( - DEFAULT_SCAN_INTERVAL, - DOMAIN, - PLATFORMS, - SERVICE_REFRESH, - SERVICE_SAVE_RECENT_CLIPS, - SERVICE_SAVE_VIDEO, - SERVICE_SEND_PIN, -) +from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS from .coordinator import BlinkUpdateCoordinator +from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -43,6 +37,8 @@ SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema( {vol.Required(CONF_NAME): cv.string, vol.Required(CONF_FILE_PATH): cv.string} ) +CONFIG_SCHEMA = 
cv.config_entry_only_config_schema(DOMAIN) + async def _reauth_flow_wrapper(hass, data): """Reauth flow wrapper.""" @@ -75,6 +71,14 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up Blink.""" + + setup_services(hass) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Blink via config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -105,40 +109,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(update_listener)) - async def blink_refresh(event_time=None): - """Call blink to refresh info.""" - await coordinator.api.refresh(force_cache=True) - - async def async_save_video(call): - """Call save video service handler.""" - await async_handle_save_video_service(hass, entry, call) - - async def async_save_recent_clips(call): - """Call save recent clips service handler.""" - await async_handle_save_recent_clips_service(hass, entry, call) - - async def send_pin(call): - """Call blink to send new pin.""" - pin = call.data[CONF_PIN] - await coordinator.api.auth.send_auth_key( - hass.data[DOMAIN][entry.entry_id].api, - pin, - ) - - hass.services.async_register(DOMAIN, SERVICE_REFRESH, blink_refresh) - hass.services.async_register( - DOMAIN, SERVICE_SAVE_VIDEO, async_save_video, schema=SERVICE_SAVE_VIDEO_SCHEMA - ) - hass.services.async_register( - DOMAIN, - SERVICE_SAVE_RECENT_CLIPS, - async_save_recent_clips, - schema=SERVICE_SAVE_RECENT_CLIPS_SCHEMA, - ) - hass.services.async_register( - DOMAIN, SERVICE_SEND_PIN, send_pin, schema=SERVICE_SEND_PIN_SCHEMA - ) - return True @@ -158,13 +128,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Blink entry.""" if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) - if not hass.data[DOMAIN]: - return True - - hass.services.async_remove(DOMAIN, SERVICE_REFRESH) - hass.services.async_remove(DOMAIN, SERVICE_SAVE_VIDEO) - hass.services.async_remove(DOMAIN, SERVICE_SEND_PIN) - return unload_ok @@ -172,37 +135,3 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" blink: Blink = hass.data[DOMAIN][entry.entry_id].api blink.refresh_rate = entry.options[CONF_SCAN_INTERVAL] - - -async def async_handle_save_video_service( - hass: HomeAssistant, entry: ConfigEntry, call -) -> None: - """Handle save video service calls.""" - camera_name = call.data[CONF_NAME] - video_path = call.data[CONF_FILENAME] - if not hass.config.is_allowed_path(video_path): - _LOGGER.error("Can't write %s, no access to path!", video_path) - return - all_cameras = hass.data[DOMAIN][entry.entry_id].api.cameras - if camera_name in all_cameras: - try: - await all_cameras[camera_name].video_to_file(video_path) - except OSError as err: - _LOGGER.error("Can't write image to file: %s", err) - - -async def async_handle_save_recent_clips_service( - hass: HomeAssistant, entry: ConfigEntry, call -) -> None: - """Save multiple recent clips to output directory.""" - camera_name = call.data[CONF_NAME] - clips_dir = call.data[CONF_FILE_PATH] - if not hass.config.is_allowed_path(clips_dir): - _LOGGER.error("Can't write to directory %s, no access to path!", clips_dir) - return - all_cameras = hass.data[DOMAIN][entry.entry_id].api.cameras - if camera_name in all_cameras: - try: - await all_cameras[camera_name].save_recent_clips(output_dir=clips_dir) - except OSError as err: - _LOGGER.error("Can't write recent clips to directory: %s", err) diff --git a/homeassistant/components/blink/alarm_control_panel.py b/homeassistant/components/blink/alarm_control_panel.py index bf45ae7a582..8e0750d1373 100644 --- 
a/homeassistant/components/blink/alarm_control_panel.py +++ b/homeassistant/components/blink/alarm_control_panel.py @@ -104,4 +104,3 @@ class BlinkSyncModuleHA( raise HomeAssistantError("Blink failed to arm camera away") from er await self.coordinator.async_refresh() - self.async_write_ha_state() diff --git a/homeassistant/components/blink/binary_sensor.py b/homeassistant/components/blink/binary_sensor.py index 9400e79838b..8598868e2dc 100644 --- a/homeassistant/components/blink/binary_sensor.py +++ b/homeassistant/components/blink/binary_sensor.py @@ -32,9 +32,11 @@ BINARY_SENSORS_TYPES: tuple[BinarySensorEntityDescription, ...] = ( device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, ), + # Camera Armed sensor is depreciated covered by switch and will be removed in 2023.6. BinarySensorEntityDescription( key=TYPE_CAMERA_ARMED, translation_key="camera_armed", + entity_registry_enabled_default=False, ), BinarySensorEntityDescription( key=TYPE_MOTION_DETECTED, diff --git a/homeassistant/components/blink/const.py b/homeassistant/components/blink/const.py index 7de42a80efc..64b05e1ba27 100644 --- a/homeassistant/components/blink/const.py +++ b/homeassistant/components/blink/const.py @@ -7,6 +7,7 @@ DEVICE_ID = "Home Assistant" CONF_MIGRATE = "migrate" CONF_CAMERA = "camera" CONF_ALARM_CONTROL_PANEL = "alarm_control_panel" +CONF_DEVICE_ID = "device_id" DEFAULT_BRAND = "Blink" DEFAULT_ATTRIBUTION = "Data provided by immedia-semi.com" DEFAULT_SCAN_INTERVAL = 300 @@ -30,4 +31,5 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CAMERA, Platform.SENSOR, + Platform.SWITCH, ] diff --git a/homeassistant/components/blink/coordinator.py b/homeassistant/components/blink/coordinator.py index d3f7551e1b2..d53d23c4344 100644 --- a/homeassistant/components/blink/coordinator.py +++ b/homeassistant/components/blink/coordinator.py @@ -13,6 +13,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN 
_LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = 30 class BlinkUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): @@ -25,7 +26,7 @@ class BlinkUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): hass, _LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=30), + update_interval=timedelta(seconds=SCAN_INTERVAL), ) async def _async_update_data(self) -> dict[str, Any]: diff --git a/homeassistant/components/blink/diagnostics.py b/homeassistant/components/blink/diagnostics.py new file mode 100644 index 00000000000..f69c1721bf1 --- /dev/null +++ b/homeassistant/components/blink/diagnostics.py @@ -0,0 +1,33 @@ +"""Diagnostics support for Blink.""" +from __future__ import annotations + +from typing import Any + +from blinkpy.blinkpy import Blink + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + +TO_REDACT = {"serial", "macaddress", "username", "password", "token"} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, + config_entry: ConfigEntry, +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + api: Blink = hass.data[DOMAIN][config_entry.entry_id].api + + data = { + camera.name: dict(camera.attributes.items()) + for _, camera in api.cameras.items() + } + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "cameras": async_redact_data(data, TO_REDACT), + } diff --git a/homeassistant/components/blink/manifest.json b/homeassistant/components/blink/manifest.json index bb8fd4a5a51..db3ab91de11 100644 --- a/homeassistant/components/blink/manifest.json +++ b/homeassistant/components/blink/manifest.json @@ -1,7 +1,7 @@ { "domain": "blink", "name": "Blink", - "codeowners": ["@fronzbot"], + "codeowners": ["@fronzbot", "@mkmer"], "config_flow": true, "dhcp": [ { diff --git a/homeassistant/components/blink/services.py 
b/homeassistant/components/blink/services.py new file mode 100644 index 00000000000..12ac0d3b859 --- /dev/null +++ b/homeassistant/components/blink/services.py @@ -0,0 +1,175 @@ +"""Services for the Blink integration.""" +from __future__ import annotations + +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import ( + ATTR_DEVICE_ID, + CONF_FILE_PATH, + CONF_FILENAME, + CONF_NAME, + CONF_PIN, +) +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +import homeassistant.helpers.config_validation as cv +import homeassistant.helpers.device_registry as dr + +from .const import ( + DOMAIN, + SERVICE_REFRESH, + SERVICE_SAVE_RECENT_CLIPS, + SERVICE_SAVE_VIDEO, + SERVICE_SEND_PIN, +) +from .coordinator import BlinkUpdateCoordinator + +SERVICE_SAVE_VIDEO_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_FILENAME): cv.string, + } +) +SERVICE_SEND_PIN_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(CONF_PIN): cv.string, + } +) +SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_FILE_PATH): cv.string, + } +) + + +def setup_services(hass: HomeAssistant) -> None: + """Set up the services for the Blink integration.""" + + def collect_coordinators( + device_ids: list[str], + ) -> list[BlinkUpdateCoordinator]: + config_entries: list[ConfigEntry] = [] + registry = dr.async_get(hass) + for target in device_ids: + device = registry.async_get(target) + if device: + device_entries: list[ConfigEntry] = [] + for entry_id in device.config_entries: + entry = hass.config_entries.async_get_entry(entry_id) + if entry and entry.domain == DOMAIN: + 
device_entries.append(entry) + if not device_entries: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device", + translation_placeholders={"target": target, "domain": DOMAIN}, + ) + config_entries.extend(device_entries) + else: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_not_found", + translation_placeholders={"target": target}, + ) + + coordinators: list[BlinkUpdateCoordinator] = [] + for config_entry in config_entries: + if config_entry.state != ConfigEntryState.LOADED: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="not_loaded", + translation_placeholders={"target": config_entry.title}, + ) + + coordinators.append(hass.data[DOMAIN][config_entry.entry_id]) + return coordinators + + async def async_handle_save_video_service(call: ServiceCall) -> None: + """Handle save video service calls.""" + camera_name = call.data[CONF_NAME] + video_path = call.data[CONF_FILENAME] + if not hass.config.is_allowed_path(video_path): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_path", + translation_placeholders={"target": video_path}, + ) + + for coordinator in collect_coordinators(call.data[ATTR_DEVICE_ID]): + all_cameras = coordinator.api.cameras + if camera_name in all_cameras: + try: + await all_cameras[camera_name].video_to_file(video_path) + except OSError as err: + raise ServiceValidationError( + str(err), + translation_domain=DOMAIN, + translation_key="cant_write", + ) from err + + async def async_handle_save_recent_clips_service(call: ServiceCall) -> None: + """Save multiple recent clips to output directory.""" + camera_name = call.data[CONF_NAME] + clips_dir = call.data[CONF_FILE_PATH] + if not hass.config.is_allowed_path(clips_dir): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_path", + translation_placeholders={"target": clips_dir}, + ) + + for coordinator in 
collect_coordinators(call.data[ATTR_DEVICE_ID]): + all_cameras = coordinator.api.cameras + if camera_name in all_cameras: + try: + await all_cameras[camera_name].save_recent_clips( + output_dir=clips_dir + ) + except OSError as err: + raise ServiceValidationError( + str(err), + translation_domain=DOMAIN, + translation_key="cant_write", + ) from err + + async def send_pin(call: ServiceCall): + """Call blink to send new pin.""" + for coordinator in collect_coordinators(call.data[ATTR_DEVICE_ID]): + await coordinator.api.auth.send_auth_key( + coordinator.api, + call.data[CONF_PIN], + ) + + async def blink_refresh(call: ServiceCall): + """Call blink to refresh info.""" + for coordinator in collect_coordinators(call.data[ATTR_DEVICE_ID]): + await coordinator.api.refresh(force_cache=True) + + # Register all the above services + service_mapping = [ + (blink_refresh, SERVICE_REFRESH, None), + ( + async_handle_save_video_service, + SERVICE_SAVE_VIDEO, + SERVICE_SAVE_VIDEO_SCHEMA, + ), + ( + async_handle_save_recent_clips_service, + SERVICE_SAVE_RECENT_CLIPS, + SERVICE_SAVE_RECENT_CLIPS_SCHEMA, + ), + (send_pin, SERVICE_SEND_PIN, SERVICE_SEND_PIN_SCHEMA), + ] + + for service_handler, service_name, schema in service_mapping: + hass.services.async_register( + DOMAIN, + service_name, + service_handler, + schema=schema, + ) diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index 85556bbcd5a..f47f72acb9c 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -47,6 +47,11 @@ "camera_armed": { "name": "Camera armed" } + }, + "switch": { + "camera_motion": { + "name": "Camera motion detection" + } } }, "services": { @@ -96,5 +101,22 @@ } } } + }, + "exceptions": { + "invalid_device": { + "message": "Device '{target}' is not a {domain} device" + }, + "device_not_found": { + "message": "Device '{target}' not found in device registry" + }, + "no_path": { + "message": "Can't write 
to directory {target}, no access to path!" + }, + "cant_write": { + "message": "Can't write to file" + }, + "not_loaded": { + "message": "{target} is not loaded" + } } } diff --git a/homeassistant/components/blink/switch.py b/homeassistant/components/blink/switch.py new file mode 100644 index 00000000000..197c8e08685 --- /dev/null +++ b/homeassistant/components/blink/switch.py @@ -0,0 +1,99 @@ +"""Support for Blink Motion detection switches.""" +from __future__ import annotations + +import asyncio +from typing import Any + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DEFAULT_BRAND, DOMAIN, TYPE_CAMERA_ARMED +from .coordinator import BlinkUpdateCoordinator + +SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( + SwitchEntityDescription( + key=TYPE_CAMERA_ARMED, + icon="mdi:motion-sensor", + translation_key="camera_motion", + device_class=SwitchDeviceClass.SWITCH, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Blink switches.""" + coordinator: BlinkUpdateCoordinator = hass.data[DOMAIN][config.entry_id] + + async_add_entities( + BlinkSwitch(coordinator, camera, description) + for camera in coordinator.api.cameras + for description in SWITCH_TYPES + ) + + +class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity): + """Representation of a Blink motion detection switch.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BlinkUpdateCoordinator, + camera, + description: SwitchEntityDescription, + ) -> None: + """Initialize the switch.""" + super().__init__(coordinator) + self._camera = coordinator.api.cameras[camera] + self.entity_description = description + serial = self._camera.serial + self._attr_unique_id = f"{serial}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, serial)}, + serial_number=serial, + name=camera, + manufacturer=DEFAULT_BRAND, + model=self._camera.camera_type, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + try: + await self._camera.async_arm(True) + + except asyncio.TimeoutError as er: + raise HomeAssistantError( + "Blink failed to arm camera motion detection" + ) from er + + await self.coordinator.async_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + try: + await self._camera.async_arm(False) + + except asyncio.TimeoutError as er: + raise HomeAssistantError( + "Blink failed to dis-arm camera motion detection" + ) from er + + await self.coordinator.async_refresh() + + @property + def is_on(self) -> bool: + """Return if Camera Motion is enabled.""" + return 
self._camera.motion_enabled diff --git a/homeassistant/components/blueprint/models.py b/homeassistant/components/blueprint/models.py index 6f48080a451..ddf57aa6eee 100644 --- a/homeassistant/components/blueprint/models.py +++ b/homeassistant/components/blueprint/models.py @@ -2,7 +2,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable +from collections.abc import Awaitable, Callable import logging import pathlib import shutil @@ -189,12 +189,14 @@ class DomainBlueprints: domain: str, logger: logging.Logger, blueprint_in_use: Callable[[HomeAssistant, str], bool], + reload_blueprint_consumers: Callable[[HomeAssistant, str], Awaitable[None]], ) -> None: """Initialize a domain blueprints instance.""" self.hass = hass self.domain = domain self.logger = logger self._blueprint_in_use = blueprint_in_use + self._reload_blueprint_consumers = reload_blueprint_consumers self._blueprints: dict[str, Blueprint | None] = {} self._load_lock = asyncio.Lock() @@ -283,7 +285,7 @@ class DomainBlueprints: blueprint = await self.hass.async_add_executor_job( self._load_blueprint, blueprint_path ) - except Exception: + except FailedToLoad: self._blueprints[blueprint_path] = None raise @@ -315,31 +317,41 @@ class DomainBlueprints: await self.hass.async_add_executor_job(path.unlink) self._blueprints[blueprint_path] = None - def _create_file(self, blueprint: Blueprint, blueprint_path: str) -> None: - """Create blueprint file.""" + def _create_file( + self, blueprint: Blueprint, blueprint_path: str, allow_override: bool + ) -> bool: + """Create blueprint file. + + Returns true if the action overrides an existing blueprint. 
+ """ path = pathlib.Path( self.hass.config.path(BLUEPRINT_FOLDER, self.domain, blueprint_path) ) - if path.exists(): + exists = path.exists() + + if not allow_override and exists: raise FileAlreadyExists(self.domain, blueprint_path) path.parent.mkdir(parents=True, exist_ok=True) path.write_text(blueprint.yaml(), encoding="utf-8") + return exists async def async_add_blueprint( - self, blueprint: Blueprint, blueprint_path: str - ) -> None: + self, blueprint: Blueprint, blueprint_path: str, allow_override=False + ) -> bool: """Add a blueprint.""" - if not blueprint_path.endswith(".yaml"): - blueprint_path = f"{blueprint_path}.yaml" - - await self.hass.async_add_executor_job( - self._create_file, blueprint, blueprint_path + overrides_existing = await self.hass.async_add_executor_job( + self._create_file, blueprint, blueprint_path, allow_override ) self._blueprints[blueprint_path] = blueprint + if overrides_existing: + await self._reload_blueprint_consumers(self.hass, blueprint_path) + + return overrides_existing + async def async_populate(self) -> None: """Create folder if it doesn't exist and populate with examples.""" if self._blueprints: diff --git a/homeassistant/components/blueprint/websocket_api.py b/homeassistant/components/blueprint/websocket_api.py index 1732320c1e9..3c7cc3769c8 100644 --- a/homeassistant/components/blueprint/websocket_api.py +++ b/homeassistant/components/blueprint/websocket_api.py @@ -14,7 +14,7 @@ from homeassistant.util import yaml from . 
import importer, models from .const import DOMAIN -from .errors import FileAlreadyExists +from .errors import FailedToLoad, FileAlreadyExists @callback @@ -81,6 +81,23 @@ async def ws_import_blueprint( ) return + # Check it exists and if so, which automations are using it + domain = imported_blueprint.blueprint.metadata["domain"] + domain_blueprints: models.DomainBlueprints | None = hass.data.get(DOMAIN, {}).get( + domain + ) + if domain_blueprints is None: + connection.send_error( + msg["id"], websocket_api.ERR_INVALID_FORMAT, "Unsupported domain" + ) + return + + suggested_path = f"{imported_blueprint.suggested_filename}.yaml" + try: + exists = bool(await domain_blueprints.async_get_blueprint(suggested_path)) + except FailedToLoad: + exists = False + connection.send_result( msg["id"], { @@ -90,6 +107,7 @@ async def ws_import_blueprint( "metadata": imported_blueprint.blueprint.metadata, }, "validation_errors": imported_blueprint.blueprint.validate(), + "exists": exists, }, ) @@ -101,6 +119,7 @@ async def ws_import_blueprint( vol.Required("path"): cv.path, vol.Required("yaml"): cv.string, vol.Optional("source_url"): cv.url, + vol.Optional("allow_override"): bool, } ) @websocket_api.async_response @@ -130,8 +149,13 @@ async def ws_save_blueprint( connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err)) return + if not path.endswith(".yaml"): + path = f"{path}.yaml" + try: - await domain_blueprints[domain].async_add_blueprint(blueprint, path) + overrides_existing = await domain_blueprints[domain].async_add_blueprint( + blueprint, path, allow_override=msg.get("allow_override", False) + ) except FileAlreadyExists: connection.send_error(msg["id"], "already_exists", "File already exists") return @@ -141,6 +165,9 @@ async def ws_save_blueprint( connection.send_result( msg["id"], + { + "overrides_existing": overrides_existing, + }, ) diff --git a/homeassistant/components/bluetooth/base_scanner.py b/homeassistant/components/bluetooth/base_scanner.py 
index 8eacd3e291a..637ebbaf867 100644 --- a/homeassistant/components/bluetooth/base_scanner.py +++ b/homeassistant/components/bluetooth/base_scanner.py @@ -334,7 +334,7 @@ class BaseHaRemoteScanner(BaseHaScanner): local_name = prev_name if service_uuids and service_uuids != prev_service_uuids: - service_uuids = list(set(service_uuids + prev_service_uuids)) + service_uuids = list({*service_uuids, *prev_service_uuids}) elif not service_uuids: service_uuids = prev_service_uuids diff --git a/homeassistant/components/bluetooth/manager.py b/homeassistant/components/bluetooth/manager.py index 34edccaf4ab..ce047747a0c 100644 --- a/homeassistant/components/bluetooth/manager.py +++ b/homeassistant/components/bluetooth/manager.py @@ -124,6 +124,7 @@ class BluetoothManager: "storage", "slot_manager", "_debug", + "shutdown", ) def __init__( @@ -165,6 +166,7 @@ class BluetoothManager: self.storage = storage self.slot_manager = slot_manager self._debug = _LOGGER.isEnabledFor(logging.DEBUG) + self.shutdown = False @property def supports_passive_scan(self) -> bool: @@ -259,6 +261,7 @@ class BluetoothManager: def async_stop(self, event: Event) -> None: """Stop the Bluetooth integration at shutdown.""" _LOGGER.debug("Stopping bluetooth manager") + self.shutdown = True if self._cancel_unavailable_tracking: self._cancel_unavailable_tracking() self._cancel_unavailable_tracking = None diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 89e6b350cad..b4975e61507 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,7 +18,7 @@ "bleak-retry-connector==3.3.0", "bluetooth-adapters==0.16.1", "bluetooth-auto-recovery==1.2.3", - "bluetooth-data-tools==1.14.0", + "bluetooth-data-tools==1.16.0", "dbus-fast==2.14.0" ] } diff --git a/homeassistant/components/bluetooth/wrappers.py b/homeassistant/components/bluetooth/wrappers.py index bfcee9d25df..9de020f163e 100644 
--- a/homeassistant/components/bluetooth/wrappers.py +++ b/homeassistant/components/bluetooth/wrappers.py @@ -270,6 +270,8 @@ class HaBleakClientWrapper(BleakClient): """Connect to the specified GATT server.""" assert models.MANAGER is not None manager = models.MANAGER + if manager.shutdown: + raise BleakError("Bluetooth is already shutdown") if debug_logging := _LOGGER.isEnabledFor(logging.DEBUG): _LOGGER.debug("%s: Looking for backend to connect", self.__address) wrapped_backend = self._async_get_best_available_backend_and_device(manager) diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index b5652694120..1ebf52e52ae 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "requirements": ["bimmer-connected==0.14.2"] + "requirements": ["bimmer-connected[china]==0.14.5"] } diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 3467322a4af..1d8b736f4dd 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -44,7 +44,8 @@ SELECT_TYPES: dict[str, BMWSelectEntityDescription] = { translation_key="ac_limit", is_available=lambda v: v.is_remote_set_ac_limit_enabled, dynamic_options=lambda v: [ - str(lim) for lim in v.charging_profile.ac_available_limits # type: ignore[union-attr] + str(lim) + for lim in v.charging_profile.ac_available_limits # type: ignore[union-attr] ], current_option=lambda v: str(v.charging_profile.ac_current_limit), # type: ignore[union-attr] remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update( diff --git a/homeassistant/components/bond/fan.py 
b/homeassistant/components/bond/fan.py index bc6235cb219..3cb81ba40b4 100644 --- a/homeassistant/components/bond/fan.py +++ b/homeassistant/components/bond/fan.py @@ -199,10 +199,6 @@ class BondFan(BondEntity, FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode != PRESET_MODE_BREEZE or not self._device.has_action( - Action.BREEZE_ON - ): - raise ValueError(f"Invalid preset mode: {preset_mode}") await self._hub.bond.action(self._device.device_id, Action(Action.BREEZE_ON)) async def async_turn_off(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/bond/strings.json b/homeassistant/components/bond/strings.json index 4c7c224bc44..8986905c6ee 100644 --- a/homeassistant/components/bond/strings.json +++ b/homeassistant/components/bond/strings.json @@ -12,6 +12,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "access_token": "[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "host": "The IP address of your Bond hub." 
} } }, diff --git a/homeassistant/components/bosch_shc/manifest.json b/homeassistant/components/bosch_shc/manifest.json index 9fd1055dd60..e29865153b3 100644 --- a/homeassistant/components/bosch_shc/manifest.json +++ b/homeassistant/components/bosch_shc/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/bosch_shc", "iot_class": "local_push", "loggers": ["boschshcpy"], - "requirements": ["boschshcpy==0.2.57"], + "requirements": ["boschshcpy==0.2.75"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/bosch_shc/strings.json b/homeassistant/components/bosch_shc/strings.json index 90688e1373f..88eb817bbd9 100644 --- a/homeassistant/components/bosch_shc/strings.json +++ b/homeassistant/components/bosch_shc/strings.json @@ -6,6 +6,9 @@ "title": "SHC authentication parameters", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Bosch Smart Home Controller." } }, "credentials": { diff --git a/homeassistant/components/braviatv/strings.json b/homeassistant/components/braviatv/strings.json index 8f8e728cb9d..4b28fa91d74 100644 --- a/homeassistant/components/braviatv/strings.json +++ b/homeassistant/components/braviatv/strings.json @@ -5,6 +5,9 @@ "description": "Ensure that your TV is turned on before trying to set it up.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Sony Bravia TV to control." 
} }, "authorize": { diff --git a/homeassistant/components/broadlink/climate.py b/homeassistant/components/broadlink/climate.py new file mode 100644 index 00000000000..6937d6bb0da --- /dev/null +++ b/homeassistant/components/broadlink/climate.py @@ -0,0 +1,85 @@ +"""Support for Broadlink climate devices.""" +from typing import Any + +from homeassistant.components.climate import ( + ATTR_TEMPERATURE, + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import PRECISION_HALVES, Platform, UnitOfTemperature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN, DOMAINS_AND_TYPES +from .device import BroadlinkDevice +from .entity import BroadlinkEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Broadlink climate entities.""" + device = hass.data[DOMAIN].devices[config_entry.entry_id] + + if device.api.type in DOMAINS_AND_TYPES[Platform.CLIMATE]: + async_add_entities([BroadlinkThermostat(device)]) + + +class BroadlinkThermostat(ClimateEntity, BroadlinkEntity): + """Representation of a Broadlink Hysen climate entity.""" + + _attr_has_entity_name = True + _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF, HVACMode.AUTO] + _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE + _attr_target_temperature_step = PRECISION_HALVES + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, device: BroadlinkDevice) -> None: + """Initialize the climate entity.""" + super().__init__(device) + self._attr_unique_id = device.unique_id + self._attr_hvac_mode = None + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs[ATTR_TEMPERATURE] + await 
self._device.async_request(self._device.api.set_temp, temperature) + self._attr_target_temperature = temperature + self.async_write_ha_state() + + @callback + def _update_state(self, data: dict[str, Any]) -> None: + """Update data.""" + if data.get("power"): + if data.get("auto_mode"): + self._attr_hvac_mode = HVACMode.AUTO + else: + self._attr_hvac_mode = HVACMode.HEAT + + if data.get("active"): + self._attr_hvac_action = HVACAction.HEATING + else: + self._attr_hvac_action = HVACAction.IDLE + else: + self._attr_hvac_mode = HVACMode.OFF + self._attr_hvac_action = HVACAction.OFF + + self._attr_current_temperature = data.get("room_temp") + self._attr_target_temperature = data.get("thermostat_temp") + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + if hvac_mode == HVACMode.OFF: + await self._device.async_request(self._device.api.set_power, 0) + else: + await self._device.async_request(self._device.api.set_power, 1) + mode = 0 if hvac_mode == HVACMode.HEAT else 1 + await self._device.async_request(self._device.api.set_mode, mode, 0) + + self._attr_hvac_mode = hvac_mode + self.async_write_ha_state() diff --git a/homeassistant/components/broadlink/const.py b/homeassistant/components/broadlink/const.py index c1ccc5ec954..2b9e8787a43 100644 --- a/homeassistant/components/broadlink/const.py +++ b/homeassistant/components/broadlink/const.py @@ -4,6 +4,7 @@ from homeassistant.const import Platform DOMAIN = "broadlink" DOMAINS_AND_TYPES = { + Platform.CLIMATE: {"HYS"}, Platform.REMOTE: {"RM4MINI", "RM4PRO", "RMMINI", "RMMINIB", "RMPRO"}, Platform.SENSOR: { "A1", diff --git a/homeassistant/components/broadlink/manifest.json b/homeassistant/components/broadlink/manifest.json index 5778520e530..7fd925a2ff4 100644 --- a/homeassistant/components/broadlink/manifest.json +++ b/homeassistant/components/broadlink/manifest.json @@ -1,7 +1,7 @@ { "domain": "broadlink", "name": "Broadlink", - "codeowners": ["@danielhiversen", 
"@felipediel", "@L-I-Am"], + "codeowners": ["@danielhiversen", "@felipediel", "@L-I-Am", "@eifinger"], "config_flow": true, "dhcp": [ { @@ -30,6 +30,9 @@ }, { "macaddress": "EC0BAE*" + }, + { + "macaddress": "780F77*" } ], "documentation": "https://www.home-assistant.io/integrations/broadlink", diff --git a/homeassistant/components/broadlink/strings.json b/homeassistant/components/broadlink/strings.json index 87567bcb7b1..335984d1ebe 100644 --- a/homeassistant/components/broadlink/strings.json +++ b/homeassistant/components/broadlink/strings.json @@ -3,10 +3,13 @@ "flow_title": "{name} ({model} at {host})", "step": { "user": { - "title": "Connect to the device", + "description": "Connect to the device", "data": { "host": "[%key:common::config_flow::data::host%]", "timeout": "Timeout" + }, + "data_description": { + "host": "The hostname or IP address of your Broadlink device." } }, "auth": { diff --git a/homeassistant/components/broadlink/updater.py b/homeassistant/components/broadlink/updater.py index da8461bf90f..10ac4df4bb8 100644 --- a/homeassistant/components/broadlink/updater.py +++ b/homeassistant/components/broadlink/updater.py @@ -16,6 +16,7 @@ def get_update_manager(device): update_managers = { "A1": BroadlinkA1UpdateManager, "BG1": BroadlinkBG1UpdateManager, + "HYS": BroadlinkThermostatUpdateManager, "LB1": BroadlinkLB1UpdateManager, "LB2": BroadlinkLB1UpdateManager, "MP1": BroadlinkMP1UpdateManager, @@ -184,3 +185,11 @@ class BroadlinkLB1UpdateManager(BroadlinkUpdateManager): async def async_fetch_data(self): """Fetch data from the device.""" return await self.device.async_request(self.device.api.get_state) + + +class BroadlinkThermostatUpdateManager(BroadlinkUpdateManager): + """Manages updates for thermostats with Broadlink DNA.""" + + async def async_fetch_data(self): + """Fetch data from the device.""" + return await self.device.async_request(self.device.api.get_full_status) diff --git a/homeassistant/components/brother/strings.json 
b/homeassistant/components/brother/strings.json index e24c941c514..0d8f4f4eedf 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -6,6 +6,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "type": "Type of the printer" + }, + "data_description": { + "host": "The hostname or IP address of the Brother printer to control." } }, "zeroconf_confirm": { diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 39eab6e7e0a..609d5ab6e83 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -60,8 +60,7 @@ async def async_setup_entry( data.static, entry, ) - ], - True, + ] ) diff --git a/homeassistant/components/bsblan/strings.json b/homeassistant/components/bsblan/strings.json index 0693f3fb8ea..689d1f893d3 100644 --- a/homeassistant/components/bsblan/strings.json +++ b/homeassistant/components/bsblan/strings.json @@ -11,6 +11,9 @@ "passkey": "Passkey string", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your BSB-Lan device." 
} } }, diff --git a/homeassistant/components/caldav/api.py b/homeassistant/components/caldav/api.py index f9236049048..fa89d6acc38 100644 --- a/homeassistant/components/caldav/api.py +++ b/homeassistant/components/caldav/api.py @@ -11,7 +11,11 @@ async def async_get_calendars( hass: HomeAssistant, client: caldav.DAVClient, component: str ) -> list[caldav.Calendar]: """Get all calendars that support the specified component.""" - calendars = await hass.async_add_executor_job(client.principal().calendars) + + def _get_calendars() -> list[caldav.Calendar]: + return client.principal().calendars() + + calendars = await hass.async_add_executor_job(_get_calendars) components_results = await asyncio.gather( *[ hass.async_add_executor_job(calendar.get_supported_components) diff --git a/homeassistant/components/caldav/todo.py b/homeassistant/components/caldav/todo.py index 887f760399b..1bd24dc542a 100644 --- a/homeassistant/components/caldav/todo.py +++ b/homeassistant/components/caldav/todo.py @@ -1,16 +1,27 @@ """CalDAV todo platform.""" from __future__ import annotations -from datetime import timedelta +import asyncio +from datetime import date, datetime, timedelta from functools import partial import logging +from typing import Any, cast import caldav +from caldav.lib.error import DAVError, NotFoundError +import requests -from homeassistant.components.todo import TodoItem, TodoItemStatus, TodoListEntity +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util from .api import async_get_calendars, get_attr_value from .const import DOMAIN @@ -26,6 +37,10 @@ TODO_STATUS_MAP = { "COMPLETED": TodoItemStatus.COMPLETED, "CANCELLED": 
TodoItemStatus.COMPLETED, } +TODO_STATUS_MAP_INV: dict[TodoItemStatus, str] = { + TodoItemStatus.NEEDS_ACTION: "NEEDS-ACTION", + TodoItemStatus.COMPLETED: "COMPLETED", +} async def async_setup_entry( @@ -57,6 +72,12 @@ def _todo_item(resource: caldav.CalendarObjectResource) -> TodoItem | None: or (summary := get_attr_value(todo, "summary")) is None ): return None + due: date | datetime | None = None + if due_value := get_attr_value(todo, "due"): + if isinstance(due_value, datetime): + due = dt_util.as_local(due_value) + elif isinstance(due_value, date): + due = due_value return TodoItem( uid=uid, summary=summary, @@ -64,13 +85,40 @@ def _todo_item(resource: caldav.CalendarObjectResource) -> TodoItem | None: get_attr_value(todo, "status") or "", TodoItemStatus.NEEDS_ACTION, ), + due=due, + description=get_attr_value(todo, "description"), ) +def _to_ics_fields(item: TodoItem) -> dict[str, Any]: + """Convert a TodoItem to the set of add or update arguments.""" + item_data: dict[str, Any] = {} + if summary := item.summary: + item_data["summary"] = summary + if status := item.status: + item_data["status"] = TODO_STATUS_MAP_INV.get(status, "NEEDS-ACTION") + if due := item.due: + if isinstance(due, datetime): + item_data["due"] = dt_util.as_utc(due).strftime("%Y%m%dT%H%M%SZ") + else: + item_data["due"] = due.strftime("%Y%m%d") + if description := item.description: + item_data["description"] = description + return item_data + + class WebDavTodoListEntity(TodoListEntity): """CalDAV To-do list entity.""" _attr_has_entity_name = True + _attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.SET_DUE_DATE_ON_ITEM + | TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM + | TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM + ) def __init__(self, calendar: caldav.Calendar, config_entry_id: str) -> None: """Initialize WebDavTodoListEntity.""" @@ -92,3 +140,57 @@ 
class WebDavTodoListEntity(TodoListEntity): for resource in results if (todo_item := _todo_item(resource)) is not None ] + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + try: + await self.hass.async_add_executor_job( + partial(self._calendar.save_todo, **_to_ics_fields(item)), + ) + except (requests.ConnectionError, DAVError) as err: + raise HomeAssistantError(f"CalDAV save error: {err}") from err + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update a To-do item.""" + uid: str = cast(str, item.uid) + try: + todo = await self.hass.async_add_executor_job( + self._calendar.todo_by_uid, uid + ) + except NotFoundError as err: + raise HomeAssistantError(f"Could not find To-do item {uid}") from err + except (requests.ConnectionError, DAVError) as err: + raise HomeAssistantError(f"CalDAV lookup error: {err}") from err + vtodo = todo.icalendar_component # type: ignore[attr-defined] + vtodo.update(**_to_ics_fields(item)) + try: + await self.hass.async_add_executor_job( + partial( + todo.save, + no_create=True, + obj_type="todo", + ), + ) + except (requests.ConnectionError, DAVError) as err: + raise HomeAssistantError(f"CalDAV save error: {err}") from err + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete To-do items.""" + tasks = ( + self.hass.async_add_executor_job(self._calendar.todo_by_uid, uid) + for uid in uids + ) + + try: + items = await asyncio.gather(*tasks) + except NotFoundError as err: + raise HomeAssistantError("Could not find To-do item") from err + except (requests.ConnectionError, DAVError) as err: + raise HomeAssistantError(f"CalDAV lookup error: {err}") from err + + # Run serially as some CalDAV servers do not support concurrent modifications + for item in items: + try: + await self.hass.async_add_executor_job(item.delete) + except (requests.ConnectionError, DAVError) as err: + raise HomeAssistantError(f"CalDAV delete error: {err}") from err 
diff --git a/homeassistant/components/calendar/__init__.py b/homeassistant/components/calendar/__init__.py index 2be0bd9a04b..5b98d372220 100644 --- a/homeassistant/components/calendar/__init__.py +++ b/homeassistant/components/calendar/__init__.py @@ -37,6 +37,7 @@ from homeassistant.helpers.config_validation import ( # noqa: F401 from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_point_in_time +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.template import DATE_STR_FORMAT from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util @@ -261,8 +262,10 @@ CALENDAR_EVENT_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -SERVICE_LIST_EVENTS: Final = "list_events" -SERVICE_LIST_EVENTS_SCHEMA: Final = vol.All( +LEGACY_SERVICE_LIST_EVENTS: Final = "list_events" +"""Deprecated: please use SERVICE_LIST_EVENTS.""" +SERVICE_GET_EVENTS: Final = "get_events" +SERVICE_GET_EVENTS_SCHEMA: Final = vol.All( cv.has_at_least_one_key(EVENT_END_DATETIME, EVENT_DURATION), cv.has_at_most_one_key(EVENT_END_DATETIME, EVENT_DURATION), cv.make_entity_service_schema( @@ -301,11 +304,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: required_features=[CalendarEntityFeature.CREATE_EVENT], ) component.async_register_legacy_entity_service( - SERVICE_LIST_EVENTS, - SERVICE_LIST_EVENTS_SCHEMA, + LEGACY_SERVICE_LIST_EVENTS, + SERVICE_GET_EVENTS_SCHEMA, async_list_events_service, supports_response=SupportsResponse.ONLY, ) + component.async_register_entity_service( + SERVICE_GET_EVENTS, + SERVICE_GET_EVENTS_SCHEMA, + async_get_events_service, + supports_response=SupportsResponse.ONLY, + ) await component.async_setup(config) return True @@ -850,6 +859,32 @@ async def async_create_event(entity: CalendarEntity, call: ServiceCall) -> None: async def 
async_list_events_service( calendar: CalendarEntity, service_call: ServiceCall +) -> ServiceResponse: + """List events on a calendar during a time range. + + Deprecated: please use async_get_events_service. + """ + _LOGGER.warning( + "Detected use of service 'calendar.list_events'. " + "This is deprecated and will stop working in Home Assistant 2024.6. " + "Use 'calendar.get_events' instead which supports multiple entities", + ) + async_create_issue( + calendar.hass, + DOMAIN, + "deprecated_service_calendar_list_events", + breaks_in_ha_version="2024.6.0", + is_fixable=True, + is_persistent=False, + issue_domain=calendar.platform.platform_name, + severity=IssueSeverity.WARNING, + translation_key="deprecated_service_calendar_list_events", + ) + return await async_get_events_service(calendar, service_call) + + +async def async_get_events_service( + calendar: CalendarEntity, service_call: ServiceCall ) -> ServiceResponse: """List events on a calendar during a time range.""" start = service_call.data.get(EVENT_START_DATETIME, dt_util.now()) diff --git a/homeassistant/components/calendar/services.yaml b/homeassistant/components/calendar/services.yaml index 712d6ad8823..2e926fbdeed 100644 --- a/homeassistant/components/calendar/services.yaml +++ b/homeassistant/components/calendar/services.yaml @@ -52,3 +52,19 @@ list_events: duration: selector: duration: +get_events: + target: + entity: + domain: calendar + fields: + start_date_time: + example: "2022-03-22 20:00:00" + selector: + datetime: + end_date_time: + example: "2022-03-22 22:00:00" + selector: + datetime: + duration: + selector: + duration: diff --git a/homeassistant/components/calendar/strings.json b/homeassistant/components/calendar/strings.json index 20679ed09b2..57450000199 100644 --- a/homeassistant/components/calendar/strings.json +++ b/homeassistant/components/calendar/strings.json @@ -72,9 +72,9 @@ } } }, - "list_events": { - "name": "List event", - "description": "Lists events on a calendar within a time 
range.", + "get_events": { + "name": "Get event", + "description": "Get events on a calendar within a time range.", "fields": { "start_date_time": { "name": "Start time", @@ -89,6 +89,37 @@ "description": "Returns active events from start_date_time until the specified duration." } } + }, + "list_events": { + "name": "List event", + "description": "Lists events on a calendar within a time range.", + "fields": { + "start_date_time": { + "name": "[%key:component::calendar::services::get_events::fields::start_date_time::name%]", + "description": "[%key:component::calendar::services::get_events::fields::start_date_time::description%]" + }, + "end_date_time": { + "name": "[%key:component::calendar::services::get_events::fields::end_date_time::name%]", + "description": "[%key:component::calendar::services::get_events::fields::end_date_time::description%]" + }, + "duration": { + "name": "[%key:component::calendar::services::get_events::fields::duration::name%]", + "description": "[%key:component::calendar::services::get_events::fields::duration::description%]" + } + } + } + }, + "issues": { + "deprecated_service_calendar_list_events": { + "title": "Detected use of deprecated service `calendar.list_events`", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::calendar::issues::deprecated_service_calendar_list_events::title%]", + "description": "Use `calendar.get_events` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **submit** to close this issue." 
+ } + } + } } } } diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index 7cf318f12a6..5035b3c6620 100644 --- a/homeassistant/components/cast/manifest.json +++ b/homeassistant/components/cast/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==13.0.7"], + "requirements": ["PyChromecast==13.0.8"], "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/climate/intent.py b/homeassistant/components/climate/intent.py new file mode 100644 index 00000000000..23cc3d5bcd2 --- /dev/null +++ b/homeassistant/components/climate/intent.py @@ -0,0 +1,68 @@ +"""Intents for the client integration.""" +from __future__ import annotations + +import voluptuous as vol + +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import intent +from homeassistant.helpers.entity_component import EntityComponent + +from . 
import DOMAIN, ClimateEntity + +INTENT_GET_TEMPERATURE = "HassClimateGetTemperature" + + +async def async_setup_intents(hass: HomeAssistant) -> None: + """Set up the climate intents.""" + intent.async_register(hass, GetTemperatureIntent()) + + +class GetTemperatureIntent(intent.IntentHandler): + """Handle GetTemperature intents.""" + + intent_type = INTENT_GET_TEMPERATURE + slot_schema = {vol.Optional("area"): str} + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Handle the intent.""" + hass = intent_obj.hass + slots = self.async_validate_slots(intent_obj.slots) + + component: EntityComponent[ClimateEntity] = hass.data[DOMAIN] + entities: list[ClimateEntity] = list(component.entities) + climate_entity: ClimateEntity | None = None + climate_state: State | None = None + + if not entities: + raise intent.IntentHandleError("No climate entities") + + if "area" in slots: + # Filter by area + area_name = slots["area"]["value"] + + for maybe_climate in intent.async_match_states( + hass, area_name=area_name, domains=[DOMAIN] + ): + climate_state = maybe_climate + break + + if climate_state is None: + raise intent.IntentHandleError(f"No climate entity in area {area_name}") + + climate_entity = component.get_entity(climate_state.entity_id) + else: + # First entity + climate_entity = entities[0] + climate_state = hass.states.get(climate_entity.entity_id) + + assert climate_entity is not None + + if climate_state is None: + raise intent.IntentHandleError(f"No state for {climate_entity.name}") + + assert climate_state is not None + + response = intent_obj.create_response() + response.response_type = intent.IntentResponseType.QUERY_ANSWER + response.async_set_states(matched_states=[climate_state]) + return response diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index e3b1b39f687..634a5e20b33 100644 --- a/homeassistant/components/cloud/http_api.py +++ 
b/homeassistant/components/cloud/http_api.py @@ -140,7 +140,7 @@ def _ws_handle_cloud_errors( handler: Callable[ [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], Coroutine[None, None, None], - ] + ], ) -> Callable[ [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], Coroutine[None, None, None], @@ -362,8 +362,11 @@ def _require_cloud_login( handler: Callable[ [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], None, - ] -) -> Callable[[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], None,]: + ], +) -> Callable[ + [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], + None, +]: """Websocket decorator that requires cloud to be logged in.""" @wraps(handler) diff --git a/homeassistant/components/cloudflare/__init__.py b/homeassistant/components/cloudflare/__init__.py index 1901bfdc0e7..d4c6775c6b9 100644 --- a/homeassistant/components/cloudflare/__init__.py +++ b/homeassistant/components/cloudflare/__init__.py @@ -4,8 +4,8 @@ from __future__ import annotations import asyncio from datetime import timedelta import logging +import socket -from aiohttp import ClientSession import pycfdns from homeassistant.config_entries import ConfigEntry @@ -51,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up recurring update.""" try: await _async_update_cloudflare( - session, client, dns_zone, entry.data[CONF_RECORDS] + hass, client, dns_zone, entry.data[CONF_RECORDS] ) except ( pycfdns.AuthenticationException, @@ -63,7 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up service for manual trigger.""" try: await _async_update_cloudflare( - session, client, dns_zone, entry.data[CONF_RECORDS] + hass, client, dns_zone, entry.data[CONF_RECORDS] ) except ( pycfdns.AuthenticationException, @@ -92,7 +92,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_update_cloudflare( - session: 
ClientSession, + hass: HomeAssistant, client: pycfdns.Client, dns_zone: pycfdns.ZoneModel, target_records: list[str], @@ -102,6 +102,7 @@ async def _async_update_cloudflare( records = await client.list_dns_records(zone_id=dns_zone["id"], type="A") _LOGGER.debug("Records: %s", records) + session = async_get_clientsession(hass, family=socket.AF_INET) location_info = await async_detect_location_info(session) if not location_info or not is_ipv4_address(location_info.ip): diff --git a/homeassistant/components/co2signal/__init__.py b/homeassistant/components/co2signal/__init__.py index 04ae811197b..028d37a73c5 100644 --- a/homeassistant/components/co2signal/__init__.py +++ b/homeassistant/components/co2signal/__init__.py @@ -1,9 +1,12 @@ """The CO2 Signal integration.""" from __future__ import annotations +from aioelectricitymaps import ElectricityMaps + from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN from .coordinator import CO2SignalCoordinator @@ -13,7 +16,10 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up CO2 Signal from a config entry.""" - coordinator = CO2SignalCoordinator(hass, entry) + session = async_get_clientsession(hass) + coordinator = CO2SignalCoordinator( + hass, ElectricityMaps(token=entry.data[CONF_API_KEY], session=session) + ) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator diff --git a/homeassistant/components/co2signal/config_flow.py b/homeassistant/components/co2signal/config_flow.py index d41bd6e0f78..234c1c01392 100644 --- a/homeassistant/components/co2signal/config_flow.py +++ b/homeassistant/components/co2signal/config_flow.py @@ -1,13 +1,18 @@ """Config flow for 
Co2signal integration.""" from __future__ import annotations +from collections.abc import Mapping from typing import Any +from aioelectricitymaps import ElectricityMaps +from aioelectricitymaps.exceptions import ElectricityMapsError, InvalidToken import voluptuous as vol from homeassistant import config_entries +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.selector import ( SelectSelector, @@ -16,8 +21,7 @@ from homeassistant.helpers.selector import ( ) from .const import CONF_COUNTRY_CODE, DOMAIN -from .coordinator import get_data -from .exceptions import APIRatelimitExceeded, InvalidAuth +from .helpers import fetch_latest_carbon_intensity from .util import get_extra_name TYPE_USE_HOME = "use_home_location" @@ -30,6 +34,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 _data: dict | None + _reauth_entry: ConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -111,25 +116,52 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): "country", data_schema, {**self._data, **user_input} ) + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Handle the reauth step.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + + data_schema = vol.Schema( + { + vol.Required(CONF_API_KEY): cv.string, + } + ) + return await self._validate_and_create("reauth", data_schema, entry_data) + async def _validate_and_create( - self, step_id: str, data_schema: vol.Schema, data: dict + self, step_id: str, data_schema: vol.Schema, data: Mapping[str, Any] ) -> FlowResult: """Validate data and show form if it is invalid.""" errors: dict[str, str] = 
{} - try: - await self.hass.async_add_executor_job(get_data, self.hass, data) - except InvalidAuth: - errors["base"] = "invalid_auth" - except APIRatelimitExceeded: - errors["base"] = "api_ratelimit" - except Exception: # pylint: disable=broad-except - errors["base"] = "unknown" - else: - return self.async_create_entry( - title=get_extra_name(data) or "CO2 Signal", - data=data, - ) + if data: + session = async_get_clientsession(self.hass) + em = ElectricityMaps(token=data[CONF_API_KEY], session=session) + + try: + await fetch_latest_carbon_intensity(self.hass, em, data) + except InvalidToken: + errors["base"] = "invalid_auth" + except ElectricityMapsError: + errors["base"] = "unknown" + else: + if self._reauth_entry: + self.hass.config_entries.async_update_entry( + self._reauth_entry, + data={ + CONF_API_KEY: data[CONF_API_KEY], + }, + ) + await self.hass.config_entries.async_reload( + self._reauth_entry.entry_id + ) + return self.async_abort(reason="reauth_successful") + + return self.async_create_entry( + title=get_extra_name(data) or "CO2 Signal", + data=data, + ) return self.async_show_form( step_id=step_id, diff --git a/homeassistant/components/co2signal/coordinator.py b/homeassistant/components/co2signal/coordinator.py index 24d7bbd18af..115c976b465 100644 --- a/homeassistant/components/co2signal/coordinator.py +++ b/homeassistant/components/co2signal/coordinator.py @@ -1,94 +1,49 @@ """DataUpdateCoordinator for the co2signal integration.""" from __future__ import annotations -from collections.abc import Mapping from datetime import timedelta import logging -from typing import Any, cast -import CO2Signal -from requests.exceptions import JSONDecodeError +from aioelectricitymaps import ElectricityMaps +from aioelectricitymaps.exceptions import ElectricityMapsError, InvalidToken +from aioelectricitymaps.models import CarbonIntensityResponse from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, 
CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_COUNTRY_CODE, DOMAIN -from .exceptions import APIRatelimitExceeded, CO2Error, InvalidAuth, UnknownError -from .models import CO2SignalResponse +from .const import DOMAIN +from .helpers import fetch_latest_carbon_intensity _LOGGER = logging.getLogger(__name__) -class CO2SignalCoordinator(DataUpdateCoordinator[CO2SignalResponse]): +class CO2SignalCoordinator(DataUpdateCoordinator[CarbonIntensityResponse]): """Data update coordinator.""" - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, client: ElectricityMaps) -> None: """Initialize the coordinator.""" super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(minutes=15) ) - self._entry = entry + self.client = client @property def entry_id(self) -> str: """Return entry ID.""" - return self._entry.entry_id + return self.config_entry.entry_id - async def _async_update_data(self) -> CO2SignalResponse: + async def _async_update_data(self) -> CarbonIntensityResponse: """Fetch the latest data from the source.""" + try: - data = await self.hass.async_add_executor_job( - get_data, self.hass, self._entry.data + return await fetch_latest_carbon_intensity( + self.hass, self.client, self.config_entry.data ) - except InvalidAuth as err: + except InvalidToken as err: raise ConfigEntryAuthFailed from err - except CO2Error as err: + except ElectricityMapsError as err: raise UpdateFailed(str(err)) from err - - return data - - -def get_data(hass: HomeAssistant, config: Mapping[str, Any]) -> CO2SignalResponse: - """Get data from the API.""" - if CONF_COUNTRY_CODE in config: - latitude = None - longitude = None - else: - latitude = config.get(CONF_LATITUDE, hass.config.latitude) - longitude = 
config.get(CONF_LONGITUDE, hass.config.longitude) - - try: - data = CO2Signal.get_latest( - config[CONF_API_KEY], - config.get(CONF_COUNTRY_CODE), - latitude, - longitude, - wait=False, - ) - - except JSONDecodeError as err: - # raise occasional occurring json decoding errors as CO2Error so the data update coordinator retries it - raise CO2Error from err - - except ValueError as err: - err_str = str(err) - - if "Invalid authentication credentials" in err_str: - raise InvalidAuth from err - if "API rate limit exceeded." in err_str: - raise APIRatelimitExceeded from err - - _LOGGER.exception("Unexpected exception") - raise UnknownError from err - - if "error" in data: - raise UnknownError(data["error"]) - - if data.get("status") != "ok": - _LOGGER.exception("Unexpected response: %s", data) - raise UnknownError - - return cast(CO2SignalResponse, data) diff --git a/homeassistant/components/co2signal/diagnostics.py b/homeassistant/components/co2signal/diagnostics.py index db08aa4eca6..1c53f7c5b08 100644 --- a/homeassistant/components/co2signal/diagnostics.py +++ b/homeassistant/components/co2signal/diagnostics.py @@ -1,6 +1,7 @@ """Diagnostics support for CO2Signal.""" from __future__ import annotations +from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data @@ -22,5 +23,5 @@ async def async_get_config_entry_diagnostics( return { "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), - "data": coordinator.data, + "data": asdict(coordinator.data), } diff --git a/homeassistant/components/co2signal/exceptions.py b/homeassistant/components/co2signal/exceptions.py deleted file mode 100644 index cc8ee709bde..00000000000 --- a/homeassistant/components/co2signal/exceptions.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Exceptions to the co2signal integration.""" -from homeassistant.exceptions import HomeAssistantError - - -class CO2Error(HomeAssistantError): - """Base error.""" - - -class 
InvalidAuth(CO2Error): - """Raised when invalid authentication credentials are provided.""" - - -class APIRatelimitExceeded(CO2Error): - """Raised when the API rate limit is exceeded.""" - - -class UnknownError(CO2Error): - """Raised when an unknown error occurs.""" diff --git a/homeassistant/components/co2signal/helpers.py b/homeassistant/components/co2signal/helpers.py new file mode 100644 index 00000000000..43579c162e2 --- /dev/null +++ b/homeassistant/components/co2signal/helpers.py @@ -0,0 +1,28 @@ +"""Helper functions for the CO2 Signal integration.""" +from collections.abc import Mapping +from typing import Any + +from aioelectricitymaps import ElectricityMaps +from aioelectricitymaps.models import CarbonIntensityResponse + +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from .const import CONF_COUNTRY_CODE + + +async def fetch_latest_carbon_intensity( + hass: HomeAssistant, + em: ElectricityMaps, + config: Mapping[str, Any], +) -> CarbonIntensityResponse: + """Fetch the latest carbon intensity based on country code or location coordinates.""" + if CONF_COUNTRY_CODE in config: + return await em.latest_carbon_intensity_by_country_code( + code=config[CONF_COUNTRY_CODE] + ) + + return await em.latest_carbon_intensity_by_coordinates( + lat=config.get(CONF_LATITUDE, hass.config.latitude), + lon=config.get(CONF_LONGITUDE, hass.config.longitude), + ) diff --git a/homeassistant/components/co2signal/manifest.json b/homeassistant/components/co2signal/manifest.json index a4d7c55d6da..d82af5b5034 100644 --- a/homeassistant/components/co2signal/manifest.json +++ b/homeassistant/components/co2signal/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/co2signal", "integration_type": "service", "iot_class": "cloud_polling", - "loggers": ["CO2Signal"], - "requirements": ["CO2Signal==0.4.2"] + "loggers": ["aioelectricitymaps"], + "requirements": ["aioelectricitymaps==0.1.5"] 
} diff --git a/homeassistant/components/co2signal/models.py b/homeassistant/components/co2signal/models.py deleted file mode 100644 index 758bb15c5f0..00000000000 --- a/homeassistant/components/co2signal/models.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Models to the co2signal integration.""" -from typing import TypedDict - - -class CO2SignalData(TypedDict): - """Data field.""" - - carbonIntensity: float - fossilFuelPercentage: float - - -class CO2SignalUnit(TypedDict): - """Unit field.""" - - carbonIntensity: str - - -class CO2SignalResponse(TypedDict): - """API response.""" - - status: str - countryCode: str - data: CO2SignalData - units: CO2SignalUnit diff --git a/homeassistant/components/co2signal/sensor.py b/homeassistant/components/co2signal/sensor.py index d00bdf70d3e..00051d8bec9 100644 --- a/homeassistant/components/co2signal/sensor.py +++ b/homeassistant/components/co2signal/sensor.py @@ -1,9 +1,10 @@ """Support for the CO2signal platform.""" from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass -from datetime import timedelta -from typing import cast + +from aioelectricitymaps.models import CarbonIntensityResponse from homeassistant.components.sensor import ( SensorEntity, @@ -20,15 +21,17 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTRIBUTION, DOMAIN from .coordinator import CO2SignalCoordinator -SCAN_INTERVAL = timedelta(minutes=3) - -@dataclass +@dataclass(kw_only=True) class CO2SensorEntityDescription(SensorEntityDescription): """Provide a description of a CO2 sensor.""" # For backwards compat, allow description to override unique ID key to use unique_id: str | None = None + unit_of_measurement_fn: Callable[ + [CarbonIntensityResponse], str | None + ] | None = None + value_fn: Callable[[CarbonIntensityResponse], float | None] SENSORS = ( @@ -36,12 +39,14 @@ SENSORS = ( key="carbonIntensity", translation_key="carbon_intensity", unique_id="co2intensity", - 
# No unit, it's extracted from response. + value_fn=lambda response: response.data.carbon_intensity, + unit_of_measurement_fn=lambda response: response.units.carbon_intensity, ), CO2SensorEntityDescription( key="fossilFuelPercentage", translation_key="fossil_fuel_percentage", native_unit_of_measurement=PERCENTAGE, + value_fn=lambda response: response.data.fossil_fuel_percentage, ), ) @@ -51,7 +56,9 @@ async def async_setup_entry( ) -> None: """Set up the CO2signal sensor.""" coordinator: CO2SignalCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities(CO2Sensor(coordinator, description) for description in SENSORS) + async_add_entities( + [CO2Sensor(coordinator, description) for description in SENSORS], False + ) class CO2Sensor(CoordinatorEntity[CO2SignalCoordinator], SensorEntity): @@ -71,7 +78,7 @@ class CO2Sensor(CoordinatorEntity[CO2SignalCoordinator], SensorEntity): self.entity_description = description self._attr_extra_state_attributes = { - "country_code": coordinator.data["countryCode"], + "country_code": coordinator.data.country_code, } self._attr_device_info = DeviceInfo( configuration_url="https://www.electricitymaps.com/", @@ -84,26 +91,15 @@ class CO2Sensor(CoordinatorEntity[CO2SignalCoordinator], SensorEntity): f"{coordinator.entry_id}_{description.unique_id or description.key}" ) - @property - def available(self) -> bool: - """Return True if entity is available.""" - return ( - super().available - and self.entity_description.key in self.coordinator.data["data"] - ) - @property def native_value(self) -> float | None: """Return sensor state.""" - if (value := self.coordinator.data["data"][self.entity_description.key]) is None: # type: ignore[literal-required] - return None - return round(value, 2) + return self.entity_description.value_fn(self.coordinator.data) @property def native_unit_of_measurement(self) -> str | None: """Return the unit of measurement.""" - if self.entity_description.native_unit_of_measurement: - return 
self.entity_description.native_unit_of_measurement - return cast( - str, self.coordinator.data["units"].get(self.entity_description.key) - ) + if self.entity_description.unit_of_measurement_fn: + return self.entity_description.unit_of_measurement_fn(self.coordinator.data) + + return self.entity_description.native_unit_of_measurement diff --git a/homeassistant/components/co2signal/strings.json b/homeassistant/components/co2signal/strings.json index 4564fdf14be..89289dd816d 100644 --- a/homeassistant/components/co2signal/strings.json +++ b/homeassistant/components/co2signal/strings.json @@ -18,6 +18,11 @@ "data": { "country_code": "Country code" } + }, + "reauth": { + "data": { + "api_key": "[%key:common::config_flow::data::access_token%]" + } } }, "error": { @@ -28,7 +33,8 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "api_ratelimit": "[%key:component::co2signal::config::error::api_ratelimit%]" + "api_ratelimit": "[%key:component::co2signal::config::error::api_ratelimit%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 5dc60f535d7..38053295411 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -17,6 +17,7 @@ import homeassistant.helpers.config_validation as cv from . 
import get_accounts from .const import ( API_ACCOUNT_CURRENCY, + API_ACCOUNT_CURRENCY_CODE, API_RATES, API_RESOURCE_TYPE, API_TYPE_VAULT, @@ -81,7 +82,7 @@ async def validate_options( accounts = await hass.async_add_executor_job(get_accounts, client) accounts_currencies = [ - account[API_ACCOUNT_CURRENCY] + account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] for account in accounts if account[API_RESOURCE_TYPE] != API_TYPE_VAULT ] diff --git a/homeassistant/components/coinbase/const.py b/homeassistant/components/coinbase/const.py index c5fdec4d511..3fc8158f970 100644 --- a/homeassistant/components/coinbase/const.py +++ b/homeassistant/components/coinbase/const.py @@ -12,14 +12,16 @@ DOMAIN = "coinbase" API_ACCOUNT_AMOUNT = "amount" API_ACCOUNT_BALANCE = "balance" API_ACCOUNT_CURRENCY = "currency" +API_ACCOUNT_CURRENCY_CODE = "code" API_ACCOUNT_ID = "id" -API_ACCOUNT_NATIVE_BALANCE = "native_balance" +API_ACCOUNT_NATIVE_BALANCE = "balance" API_ACCOUNT_NAME = "name" API_ACCOUNTS_DATA = "data" API_RATES = "rates" API_RESOURCE_PATH = "resource_path" API_RESOURCE_TYPE = "type" API_TYPE_VAULT = "vault" +API_USD = "USD" WALLETS = { "1INCH": "1INCH", diff --git a/homeassistant/components/coinbase/sensor.py b/homeassistant/components/coinbase/sensor.py index 47fd3b91129..1442a626f74 100644 --- a/homeassistant/components/coinbase/sensor.py +++ b/homeassistant/components/coinbase/sensor.py @@ -14,9 +14,9 @@ from .const import ( API_ACCOUNT_AMOUNT, API_ACCOUNT_BALANCE, API_ACCOUNT_CURRENCY, + API_ACCOUNT_CURRENCY_CODE, API_ACCOUNT_ID, API_ACCOUNT_NAME, - API_ACCOUNT_NATIVE_BALANCE, API_RATES, API_RESOURCE_TYPE, API_TYPE_VAULT, @@ -55,7 +55,7 @@ async def async_setup_entry( entities: list[SensorEntity] = [] provided_currencies: list[str] = [ - account[API_ACCOUNT_CURRENCY] + account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] for account in instance.accounts if account[API_RESOURCE_TYPE] != API_TYPE_VAULT ] @@ -106,26 +106,28 @@ class AccountSensor(SensorEntity): 
self._currency = currency for account in coinbase_data.accounts: if ( - account[API_ACCOUNT_CURRENCY] != currency + account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency or account[API_RESOURCE_TYPE] == API_TYPE_VAULT ): continue self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}" self._attr_unique_id = ( f"coinbase-{account[API_ACCOUNT_ID]}-wallet-" - f"{account[API_ACCOUNT_CURRENCY]}" + f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}" ) self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT] - self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY] + self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][ + API_ACCOUNT_CURRENCY_CODE + ] self._attr_icon = CURRENCY_ICONS.get( - account[API_ACCOUNT_CURRENCY], DEFAULT_COIN_ICON + account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE], + DEFAULT_COIN_ICON, + ) + self._native_balance = round( + float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]) + / float(coinbase_data.exchange_rates[API_RATES][currency]), + 2, ) - self._native_balance = account[API_ACCOUNT_NATIVE_BALANCE][ - API_ACCOUNT_AMOUNT - ] - self._native_currency = account[API_ACCOUNT_NATIVE_BALANCE][ - API_ACCOUNT_CURRENCY - ] break self._attr_state_class = SensorStateClass.TOTAL @@ -141,7 +143,7 @@ class AccountSensor(SensorEntity): def extra_state_attributes(self) -> dict[str, str]: """Return the state attributes of the sensor.""" return { - ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._native_currency}", + ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}", } def update(self) -> None: @@ -149,17 +151,17 @@ class AccountSensor(SensorEntity): self._coinbase_data.update() for account in self._coinbase_data.accounts: if ( - account[API_ACCOUNT_CURRENCY] != self._currency + account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] + != self._currency or account[API_RESOURCE_TYPE] == API_TYPE_VAULT ): continue self._attr_native_value = 
account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT] - self._native_balance = account[API_ACCOUNT_NATIVE_BALANCE][ - API_ACCOUNT_AMOUNT - ] - self._native_currency = account[API_ACCOUNT_NATIVE_BALANCE][ - API_ACCOUNT_CURRENCY - ] + self._native_balance = round( + float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]) + / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]), + 2, + ) break diff --git a/homeassistant/components/comelit/coordinator.py b/homeassistant/components/comelit/coordinator.py index d3bc973429b..1573d5cb627 100644 --- a/homeassistant/components/comelit/coordinator.py +++ b/homeassistant/components/comelit/coordinator.py @@ -68,13 +68,13 @@ class ComelitSerialBridge(DataUpdateCoordinator): async def _async_update_data(self) -> dict[str, Any]: """Update device data.""" _LOGGER.debug("Polling Comelit Serial Bridge host: %s", self._host) + try: await self.api.login() + return await self.api.get_all_devices() except exceptions.CannotConnect as err: _LOGGER.warning("Connection error for %s", self._host) await self.api.close() raise UpdateFailed(f"Error fetching data: {repr(err)}") from err except exceptions.CannotAuthenticate: raise ConfigEntryAuthFailed - - return await self.api.get_all_devices() diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index 5978f17cfc4..89157b54255 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/comelit", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "requirements": ["aiocomelit==0.3.0"] + "requirements": ["aiocomelit==0.6.2"] } diff --git a/homeassistant/components/comelit/strings.json b/homeassistant/components/comelit/strings.json index 730674e913a..73c2c7d00c6 100644 --- a/homeassistant/components/comelit/strings.json +++ b/homeassistant/components/comelit/strings.json @@ -13,6 +13,9 @@ "host": 
"[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "pin": "[%key:common::config_flow::data::pin%]" + }, + "data_description": { + "host": "The hostname or IP address of your Comelit device." } } }, diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 9dcf70dda80..99ebb4b60b1 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -188,11 +188,14 @@ class DefaultAgent(AbstractConversationAgent): return None slot_lists = self._make_slot_lists() + intent_context = self._make_intent_context(user_input) + result = await self.hass.async_add_executor_job( self._recognize, user_input, lang_intents, slot_lists, + intent_context, ) return result @@ -221,15 +224,17 @@ class DefaultAgent(AbstractConversationAgent): # loaded in async_recognize. assert lang_intents is not None + # Slot values to pass to the intent + slots = { + entity.name: {"value": entity.value} for entity in result.entities_list + } + try: intent_response = await intent.async_handle( self.hass, DOMAIN, result.intent.name, - { - entity.name: {"value": entity.value} - for entity in result.entities_list - }, + slots, user_input.text, user_input.context, language, @@ -277,12 +282,16 @@ class DefaultAgent(AbstractConversationAgent): user_input: ConversationInput, lang_intents: LanguageIntents, slot_lists: dict[str, SlotList], + intent_context: dict[str, Any] | None, ) -> RecognizeResult | None: """Search intents for a match to user input.""" # Prioritize matches with entity names above area names maybe_result: RecognizeResult | None = None for result in recognize_all( - user_input.text, lang_intents.intents, slot_lists=slot_lists + user_input.text, + lang_intents.intents, + slot_lists=slot_lists, + intent_context=intent_context, ): if "name" in result.entities: return result @@ -623,6 +632,25 @@ class 
DefaultAgent(AbstractConversationAgent): return self._slot_lists + def _make_intent_context( + self, user_input: ConversationInput + ) -> dict[str, Any] | None: + """Return intent recognition context for user input.""" + if not user_input.device_id: + return None + + devices = dr.async_get(self.hass) + device = devices.async_get(user_input.device_id) + if (device is None) or (device.area_id is None): + return None + + areas = ar.async_get(self.hass) + device_area = areas.async_get_area(device.area_id) + if device_area is None: + return None + + return {"area": device_area.name} + def _get_error_text( self, response_type: ResponseType, lang_intents: LanguageIntents | None ) -> str: diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 1b4d346082a..2a069d5d92b 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["hassil==1.2.5", "home-assistant-intents==2023.10.16"] + "requirements": ["hassil==1.5.1", "home-assistant-intents==2023.11.29"] } diff --git a/homeassistant/components/coolmaster/strings.json b/homeassistant/components/coolmaster/strings.json index 7baa6444c1d..17deab306df 100644 --- a/homeassistant/components/coolmaster/strings.json +++ b/homeassistant/components/coolmaster/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "title": "Set up your CoolMasterNet connection details.", + "description": "Set up your CoolMasterNet connection details.", "data": { "host": "[%key:common::config_flow::data::host%]", "off": "Can be turned off", @@ -12,6 +12,9 @@ "dry": "Support dry mode", "fan_only": "Support fan only mode", "swing_support": "Control swing mode" + }, + "data_description": { + "host": "The hostname or IP address of your CoolMasterNet device." 
} } }, diff --git a/homeassistant/components/daikin/switch.py b/homeassistant/components/daikin/switch.py index 8dd75916685..7acd234e397 100644 --- a/homeassistant/components/daikin/switch.py +++ b/homeassistant/components/daikin/switch.py @@ -13,8 +13,10 @@ from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi ZONE_ICON = "mdi:home-circle" STREAMER_ICON = "mdi:air-filter" +TOGGLE_ICON = "mdi:power" DAIKIN_ATTR_ADVANCED = "adv" DAIKIN_ATTR_STREAMER = "streamer" +DAIKIN_ATTR_MODE = "mode" async def async_setup_platform( @@ -35,7 +37,7 @@ async def async_setup_entry( ) -> None: """Set up Daikin climate based on config_entry.""" daikin_api: DaikinApi = hass.data[DAIKIN_DOMAIN][entry.entry_id] - switches: list[DaikinZoneSwitch | DaikinStreamerSwitch] = [] + switches: list[DaikinZoneSwitch | DaikinStreamerSwitch | DaikinToggleSwitch] = [] if zones := daikin_api.device.zones: switches.extend( [ @@ -49,6 +51,7 @@ async def async_setup_entry( # device supports the streamer, so assume so if it does support # advanced modes. 
switches.append(DaikinStreamerSwitch(daikin_api)) + switches.append(DaikinToggleSwitch(daikin_api)) async_add_entities(switches) @@ -119,3 +122,33 @@ class DaikinStreamerSwitch(SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn the zone off.""" await self._api.device.set_streamer("off") + + +class DaikinToggleSwitch(SwitchEntity): + """Switch state.""" + + _attr_icon = TOGGLE_ICON + _attr_has_entity_name = True + + def __init__(self, api: DaikinApi) -> None: + """Initialize switch.""" + self._api = api + self._attr_device_info = api.device_info + self._attr_unique_id = f"{self._api.device.mac}-toggle" + + @property + def is_on(self) -> bool: + """Return the state of the sensor.""" + return "off" not in self._api.device.represent(DAIKIN_ATTR_MODE) + + async def async_update(self) -> None: + """Retrieve latest state.""" + await self._api.async_update() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the zone on.""" + await self._api.device.set({}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the zone off.""" + await self._api.device.set({DAIKIN_ATTR_MODE: "off"}) diff --git a/homeassistant/components/deconz/binary_sensor.py b/homeassistant/components/deconz/binary_sensor.py index 114e401346d..84141eac964 100644 --- a/homeassistant/components/deconz/binary_sensor.py +++ b/homeassistant/components/deconz/binary_sensor.py @@ -65,24 +65,15 @@ T = TypeVar( ) -@dataclass -class DeconzBinarySensorDescriptionMixin(Generic[T]): - """Required values when describing secondary sensor attributes.""" - - update_key: str - value_fn: Callable[[T], bool | None] - - -@dataclass -class DeconzBinarySensorDescription( - BinarySensorEntityDescription, - DeconzBinarySensorDescriptionMixin[T], -): +@dataclass(kw_only=True) +class DeconzBinarySensorDescription(Generic[T], BinarySensorEntityDescription): """Class describing deCONZ binary sensor entities.""" instance_check: type[T] | None = None name_suffix: str = "" 
old_unique_id_suffix: str = "" + update_key: str + value_fn: Callable[[T], bool | None] ENTITY_DESCRIPTIONS: tuple[DeconzBinarySensorDescription, ...] = ( diff --git a/homeassistant/components/deconz/button.py b/homeassistant/components/deconz/button.py index 318e0e43beb..81d839ea0f2 100644 --- a/homeassistant/components/deconz/button.py +++ b/homeassistant/components/deconz/button.py @@ -23,18 +23,13 @@ from .deconz_device import DeconzDevice, DeconzSceneMixin from .gateway import DeconzGateway, get_gateway_from_config_entry -@dataclass -class DeconzButtonDescriptionMixin: - """Required values when describing deCONZ button entities.""" - - suffix: str - button_fn: str - - -@dataclass -class DeconzButtonDescription(ButtonEntityDescription, DeconzButtonDescriptionMixin): +@dataclass(kw_only=True) +class DeconzButtonDescription(ButtonEntityDescription): """Class describing deCONZ button entities.""" + button_fn: str + suffix: str + ENTITY_DESCRIPTIONS = { PydeconzScene: [ diff --git a/homeassistant/components/deconz/deconz_device.py b/homeassistant/components/deconz/deconz_device.py index 4c0f35266f9..8a5ced2c678 100644 --- a/homeassistant/components/deconz/deconz_device.py +++ b/homeassistant/components/deconz/deconz_device.py @@ -129,9 +129,8 @@ class DeconzDevice(DeconzBase[_DeviceT], Entity): if self.gateway.ignore_state_updates: return - if ( - self._update_keys is not None - and not self._device.changed_keys.intersection(self._update_keys) + if self._update_keys is not None and not self._device.changed_keys.intersection( + self._update_keys ): return diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 6245558a1c5..af1824e441c 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["pydeconz"], "quality_scale": "platinum", - "requirements": ["pydeconz==113"], + "requirements": 
["pydeconz==114"], "ssdp": [ { "manufacturer": "Royal Philips Electronics", diff --git a/homeassistant/components/deconz/number.py b/homeassistant/components/deconz/number.py index ec4438502b6..7cc0da936cb 100644 --- a/homeassistant/components/deconz/number.py +++ b/homeassistant/components/deconz/number.py @@ -31,9 +31,9 @@ from .util import serial_from_unique_id T = TypeVar("T", Presence, PydeconzSensorBase) -@dataclass -class DeconzNumberDescriptionMixin(Generic[T]): - """Required values when describing deCONZ number entities.""" +@dataclass(kw_only=True) +class DeconzNumberDescription(Generic[T], NumberEntityDescription): + """Class describing deCONZ number entities.""" instance_check: type[T] name_suffix: str @@ -42,11 +42,6 @@ class DeconzNumberDescriptionMixin(Generic[T]): value_fn: Callable[[T], float | None] -@dataclass -class DeconzNumberDescription(NumberEntityDescription, DeconzNumberDescriptionMixin[T]): - """Class describing deCONZ number entities.""" - - ENTITY_DESCRIPTIONS: tuple[DeconzNumberDescription, ...] 
= ( DeconzNumberDescription[Presence]( key="delay", diff --git a/homeassistant/components/deconz/sensor.py b/homeassistant/components/deconz/sensor.py index 4e00ac0a415..ecb9ac9b297 100644 --- a/homeassistant/components/deconz/sensor.py +++ b/homeassistant/components/deconz/sensor.py @@ -17,6 +17,7 @@ from pydeconz.models.sensor.generic_status import GenericStatus from pydeconz.models.sensor.humidity import Humidity from pydeconz.models.sensor.light_level import LightLevel from pydeconz.models.sensor.moisture import Moisture +from pydeconz.models.sensor.particulate_matter import ParticulateMatter from pydeconz.models.sensor.power import Power from pydeconz.models.sensor.pressure import Pressure from pydeconz.models.sensor.switch import Switch @@ -83,6 +84,7 @@ T = TypeVar( Humidity, LightLevel, Moisture, + ParticulateMatter, Power, Pressure, Temperature, @@ -91,22 +93,16 @@ T = TypeVar( ) -@dataclass -class DeconzSensorDescriptionMixin(Generic[T]): - """Required values when describing secondary sensor attributes.""" - - supported_fn: Callable[[T], bool] - update_key: str - value_fn: Callable[[T], datetime | StateType] - - -@dataclass -class DeconzSensorDescription(SensorEntityDescription, DeconzSensorDescriptionMixin[T]): +@dataclass(kw_only=True) +class DeconzSensorDescription(Generic[T], SensorEntityDescription): """Class describing deCONZ binary sensor entities.""" instance_check: type[T] | None = None name_suffix: str = "" old_unique_id_suffix: str = "" + supported_fn: Callable[[T], bool] + update_key: str + value_fn: Callable[[T], datetime | StateType] ENTITY_DESCRIPTIONS: tuple[DeconzSensorDescription, ...] = ( @@ -219,6 +215,17 @@ ENTITY_DESCRIPTIONS: tuple[DeconzSensorDescription, ...] 
= ( native_unit_of_measurement=PERCENTAGE, suggested_display_precision=1, ), + DeconzSensorDescription[ParticulateMatter]( + key="particulate_matter_pm2_5", + supported_fn=lambda device: device.measured_value is not None, + update_key="measured_value", + value_fn=lambda device: device.measured_value, + instance_check=ParticulateMatter, + name_suffix="PM25", + device_class=SensorDeviceClass.PM25, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + ), DeconzSensorDescription[Power]( key="power", supported_fn=lambda device: device.power is not None, diff --git a/homeassistant/components/deconz/strings.json b/homeassistant/components/deconz/strings.json index e32ab875c28..c06a07e6ce5 100644 --- a/homeassistant/components/deconz/strings.json +++ b/homeassistant/components/deconz/strings.json @@ -11,11 +11,14 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your deCONZ host." } }, "link": { "title": "Link with deCONZ", - "description": "Unlock your deCONZ gateway to register with Home Assistant.\n\n1. Go to deCONZ Settings -> Gateway -> Advanced\n2. Press \"Authenticate app\" button" + "description": "Unlock your deCONZ gateway to register with Home Assistant.\n\n1. Go to deCONZ Settings > Gateway > Advanced\n2. 
Press \"Authenticate app\" button" }, "hassio_confirm": { "title": "deCONZ Zigbee gateway via Home Assistant add-on", diff --git a/homeassistant/components/deluge/strings.json b/homeassistant/components/deluge/strings.json index e0266d004e2..52706f39894 100644 --- a/homeassistant/components/deluge/strings.json +++ b/homeassistant/components/deluge/strings.json @@ -9,6 +9,9 @@ "password": "[%key:common::config_flow::data::password%]", "port": "[%key:common::config_flow::data::port%]", "web_port": "Web port (for visiting service)" + }, + "data_description": { + "host": "The hostname or IP address of your Deluge device." } } }, diff --git a/homeassistant/components/demo/fan.py b/homeassistant/components/demo/fan.py index 211389a5466..73cae4a64b1 100644 --- a/homeassistant/components/demo/fan.py +++ b/homeassistant/components/demo/fan.py @@ -161,12 +161,9 @@ class DemoPercentageFan(BaseDemoFan, FanEntity): def set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if self.preset_modes and preset_mode in self.preset_modes: - self._preset_mode = preset_mode - self._percentage = None - self.schedule_update_ha_state() - else: - raise ValueError(f"Invalid preset mode: {preset_mode}") + self._preset_mode = preset_mode + self._percentage = None + self.schedule_update_ha_state() def turn_on( self, @@ -230,10 +227,6 @@ class AsyncDemoPercentageFan(BaseDemoFan, FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if self.preset_modes is None or preset_mode not in self.preset_modes: - raise ValueError( - f"{preset_mode} is not a valid preset_mode: {self.preset_modes}" - ) self._preset_mode = preset_mode self._percentage = None self.async_write_ha_state() diff --git a/homeassistant/components/devialet/__init__.py b/homeassistant/components/devialet/__init__.py new file mode 100644 index 00000000000..034f93abb68 --- /dev/null +++ b/homeassistant/components/devialet/__init__.py @@ -0,0 +1,31 @@ +"""The 
Devialet integration.""" +from __future__ import annotations + +from devialet import DevialetApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +PLATFORMS = [Platform.MEDIA_PLAYER] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Devialet from a config entry.""" + session = async_get_clientsession(hass) + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = DevialetApi( + entry.data[CONF_HOST], session + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Devialet config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + del hass.data[DOMAIN][entry.entry_id] + return unload_ok diff --git a/homeassistant/components/devialet/config_flow.py b/homeassistant/components/devialet/config_flow.py new file mode 100644 index 00000000000..de52788de50 --- /dev/null +++ b/homeassistant/components/devialet/config_flow.py @@ -0,0 +1,104 @@ +"""Support for Devialet Phantom speakers.""" +from __future__ import annotations + +import logging +from typing import Any + +from devialet.devialet_api import DevialetApi +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +LOGGER = logging.getLogger(__package__) + + +class DevialetFlowHandler(ConfigFlow, domain=DOMAIN): + """Config flow for Devialet.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize flow.""" + 
self._host: str | None = None + self._name: str | None = None + self._model: str | None = None + self._serial: str | None = None + self._errors: dict[str, str] = {} + + async def async_validate_input(self) -> FlowResult | None: + """Validate the input using the Devialet API.""" + + self._errors.clear() + session = async_get_clientsession(self.hass) + client = DevialetApi(self._host, session) + + if not await client.async_update() or client.serial is None: + self._errors["base"] = "cannot_connect" + LOGGER.error("Cannot connect") + return None + + await self.async_set_unique_id(client.serial) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=client.device_name, + data={CONF_HOST: self._host, CONF_NAME: client.device_name}, + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle a flow initialized by the user or zeroconf.""" + + if user_input is not None: + self._host = user_input[CONF_HOST] + result = await self.async_validate_input() + if result is not None: + return result + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=self._errors, + ) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> FlowResult: + """Handle a flow initialized by zeroconf discovery.""" + LOGGER.info("Devialet device found via ZEROCONF: %s", discovery_info) + + self._host = discovery_info.host + self._name = discovery_info.name.split(".", 1)[0] + self._model = discovery_info.properties["model"] + self._serial = discovery_info.properties["serialNumber"] + + await self.async_set_unique_id(self._serial) + self._abort_if_unique_id_configured() + + self.context["title_placeholders"] = {"title": self._name} + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle user-confirmation of discovered 
node.""" + title = f"{self._name} ({self._model})" + + if user_input is not None: + result = await self.async_validate_input() + if result is not None: + return result + + return self.async_show_form( + step_id="confirm", + description_placeholders={"device": self._model, "title": title}, + errors=self._errors, + last_step=True, + ) diff --git a/homeassistant/components/devialet/const.py b/homeassistant/components/devialet/const.py new file mode 100644 index 00000000000..ccb4fbc7964 --- /dev/null +++ b/homeassistant/components/devialet/const.py @@ -0,0 +1,12 @@ +"""Constants for the Devialet integration.""" +from typing import Final + +DOMAIN: Final = "devialet" +MANUFACTURER: Final = "Devialet" + +SOUND_MODES = { + "Custom": "custom", + "Flat": "flat", + "Night mode": "night mode", + "Voice": "voice", +} diff --git a/homeassistant/components/devialet/coordinator.py b/homeassistant/components/devialet/coordinator.py new file mode 100644 index 00000000000..9e1eada7183 --- /dev/null +++ b/homeassistant/components/devialet/coordinator.py @@ -0,0 +1,32 @@ +"""Class representing a Devialet update coordinator.""" +from datetime import timedelta +import logging + +from devialet import DevialetApi + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +SCAN_INTERVAL = timedelta(seconds=5) + + +class DevialetCoordinator(DataUpdateCoordinator[None]): + """Devialet update coordinator.""" + + def __init__(self, hass: HomeAssistant, client: DevialetApi) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.client.async_update() diff --git a/homeassistant/components/devialet/diagnostics.py b/homeassistant/components/devialet/diagnostics.py 
new file mode 100644 index 00000000000..f9824a9cad1 --- /dev/null +++ b/homeassistant/components/devialet/diagnostics.py @@ -0,0 +1,20 @@ +"""Diagnostics support for Devialet.""" +from __future__ import annotations + +from typing import Any + +from devialet import DevialetApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + client: DevialetApi = hass.data[DOMAIN][entry.entry_id] + + return await client.async_get_diagnostics() diff --git a/homeassistant/components/devialet/manifest.json b/homeassistant/components/devialet/manifest.json new file mode 100644 index 00000000000..286b9bfb112 --- /dev/null +++ b/homeassistant/components/devialet/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "devialet", + "name": "Devialet", + "after_dependencies": ["zeroconf"], + "codeowners": ["@fwestenberg"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/devialet", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["devialet==1.4.3"], + "zeroconf": ["_devialet-http._tcp.local."] +} diff --git a/homeassistant/components/devialet/media_player.py b/homeassistant/components/devialet/media_player.py new file mode 100644 index 00000000000..a79a82e6f60 --- /dev/null +++ b/homeassistant/components/devialet/media_player.py @@ -0,0 +1,212 @@ +"""Support for Devialet speakers.""" +from __future__ import annotations + +from devialet.const import NORMAL_INPUTS + +from homeassistant.components.media_player import ( + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import 
DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER, SOUND_MODES +from .coordinator import DevialetCoordinator + +SUPPORT_DEVIALET = ( + MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.SELECT_SOUND_MODE +) + +DEVIALET_TO_HA_FEATURE_MAP = { + "play": MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.STOP, + "pause": MediaPlayerEntityFeature.PAUSE, + "previous": MediaPlayerEntityFeature.PREVIOUS_TRACK, + "next": MediaPlayerEntityFeature.NEXT_TRACK, + "seek": MediaPlayerEntityFeature.SEEK, +} + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the Devialet entry.""" + client = hass.data[DOMAIN][entry.entry_id] + coordinator = DevialetCoordinator(hass, client) + await coordinator.async_config_entry_first_refresh() + + async_add_entities([DevialetMediaPlayerEntity(coordinator, entry)]) + + +class DevialetMediaPlayerEntity( + CoordinatorEntity[DevialetCoordinator], MediaPlayerEntity +): + """Devialet media player.""" + + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, coordinator: DevialetCoordinator, entry: ConfigEntry) -> None: + """Initialize the Devialet device.""" + self.coordinator = coordinator + super().__init__(coordinator) + + self._attr_unique_id = str(entry.unique_id) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._attr_unique_id)}, + manufacturer=MANUFACTURER, + model=self.coordinator.client.model, + name=entry.data[CONF_NAME], + sw_version=self.coordinator.client.version, + ) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if not self.coordinator.client.is_available: + 
self.async_write_ha_state() + return + + self._attr_volume_level = self.coordinator.client.volume_level + self._attr_is_volume_muted = self.coordinator.client.is_volume_muted + self._attr_source_list = self.coordinator.client.source_list + self._attr_sound_mode_list = sorted(SOUND_MODES) + self._attr_media_artist = self.coordinator.client.media_artist + self._attr_media_album_name = self.coordinator.client.media_album_name + self._attr_media_artist = self.coordinator.client.media_artist + self._attr_media_image_url = self.coordinator.client.media_image_url + self._attr_media_duration = self.coordinator.client.media_duration + self._attr_media_position = self.coordinator.client.current_position + self._attr_media_position_updated_at = ( + self.coordinator.client.position_updated_at + ) + self._attr_media_title = ( + self.coordinator.client.media_title + if self.coordinator.client.media_title + else self.source + ) + self.async_write_ha_state() + + @property + def state(self) -> MediaPlayerState | None: + """Return the state of the device.""" + playing_state = self.coordinator.client.playing_state + + if not playing_state: + return MediaPlayerState.IDLE + if playing_state == "playing": + return MediaPlayerState.PLAYING + if playing_state == "paused": + return MediaPlayerState.PAUSED + return MediaPlayerState.ON + + @property + def available(self) -> bool: + """Return if the media player is available.""" + return self.coordinator.client.is_available + + @property + def supported_features(self) -> MediaPlayerEntityFeature: + """Flag media player features that are supported.""" + features = SUPPORT_DEVIALET + + if self.coordinator.client.source_state is None: + return features + + if not self.coordinator.client.available_options: + return features + + for option in self.coordinator.client.available_options: + features |= DEVIALET_TO_HA_FEATURE_MAP.get(option, 0) + return features + + @property + def source(self) -> str | None: + """Return the current input source.""" + 
source = self.coordinator.client.source + + for pretty_name, name in NORMAL_INPUTS.items(): + if source == name: + return pretty_name + return None + + @property + def sound_mode(self) -> str | None: + """Return the current sound mode.""" + if self.coordinator.client.equalizer is not None: + sound_mode = self.coordinator.client.equalizer + elif self.coordinator.client.night_mode: + sound_mode = "night mode" + else: + return None + + for pretty_name, mode in SOUND_MODES.items(): + if sound_mode == mode: + return pretty_name + return None + + async def async_volume_up(self) -> None: + """Volume up media player.""" + await self.coordinator.client.async_volume_up() + + async def async_volume_down(self) -> None: + """Volume down media player.""" + await self.coordinator.client.async_volume_down() + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level, range 0..1.""" + await self.coordinator.client.async_set_volume_level(volume) + + async def async_mute_volume(self, mute: bool) -> None: + """Mute (true) or unmute (false) media player.""" + await self.coordinator.client.async_mute_volume(mute) + + async def async_media_play(self) -> None: + """Play media player.""" + await self.coordinator.client.async_media_play() + + async def async_media_pause(self) -> None: + """Pause media player.""" + await self.coordinator.client.async_media_pause() + + async def async_media_stop(self) -> None: + """Pause media player.""" + await self.coordinator.client.async_media_stop() + + async def async_media_next_track(self) -> None: + """Send the next track command.""" + await self.coordinator.client.async_media_next_track() + + async def async_media_previous_track(self) -> None: + """Send the previous track command.""" + await self.coordinator.client.async_media_previous_track() + + async def async_media_seek(self, position: float) -> None: + """Send seek command.""" + await self.coordinator.client.async_media_seek(position) + + async def 
async_select_sound_mode(self, sound_mode: str) -> None: + """Send sound mode command.""" + for pretty_name, mode in SOUND_MODES.items(): + if sound_mode == pretty_name: + if mode == "night mode": + await self.coordinator.client.async_set_night_mode(True) + else: + await self.coordinator.client.async_set_night_mode(False) + await self.coordinator.client.async_set_equalizer(mode) + + async def async_turn_off(self) -> None: + """Turn off media player.""" + await self.coordinator.client.async_turn_off() + + async def async_select_source(self, source: str) -> None: + """Select input source.""" + await self.coordinator.client.async_select_source(source) diff --git a/homeassistant/components/devialet/strings.json b/homeassistant/components/devialet/strings.json new file mode 100644 index 00000000000..0a90da49bf4 --- /dev/null +++ b/homeassistant/components/devialet/strings.json @@ -0,0 +1,22 @@ +{ + "config": { + "flow_title": "{title}", + "step": { + "user": { + "description": "Please enter the host name or IP address of the Devialet device.", + "data": { + "host": "Host" + } + }, + "confirm": { + "description": "Do you want to set up Devialet device {device}?" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + } +} diff --git a/homeassistant/components/devialet/translations/en.json b/homeassistant/components/devialet/translations/en.json new file mode 100644 index 00000000000..af0cfc4c122 --- /dev/null +++ b/homeassistant/components/devialet/translations/en.json @@ -0,0 +1,22 @@ +{ + "config": { + "abort": { + "already_configured": "Service is already configured" + }, + "error": { + "cannot_connect": "Failed to connect" + }, + "flow_title": "{title}", + "step": { + "confirm": { + "description": "Do you want to set up Devialet device {device}?" 
+ }, + "user": { + "data": { + "host": "Host" + }, + "description": "Please enter the host name or IP address of the Devialet device." + } + } + } +} \ No newline at end of file diff --git a/homeassistant/components/device_tracker/legacy.py b/homeassistant/components/device_tracker/legacy.py index 7c12a2d8777..f18f7984e1e 100644 --- a/homeassistant/components/device_tracker/legacy.py +++ b/homeassistant/components/device_tracker/legacy.py @@ -14,7 +14,7 @@ import voluptuous as vol from homeassistant import util from homeassistant.backports.functools import cached_property from homeassistant.components import zone -from homeassistant.config import async_log_exception, load_yaml_config_file +from homeassistant.config import async_log_schema_error, load_yaml_config_file from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_GPS_ACCURACY, @@ -44,7 +44,11 @@ from homeassistant.helpers.event import ( ) from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType, GPSType, StateType -from homeassistant.setup import async_prepare_setup_platform, async_start_setup +from homeassistant.setup import ( + async_notify_setup_error, + async_prepare_setup_platform, + async_start_setup, +) from homeassistant.util import dt as dt_util from homeassistant.util.yaml import dump @@ -1006,7 +1010,8 @@ async def async_load_config( device = dev_schema(device) device["dev_id"] = cv.slugify(dev_id) except vol.Invalid as exp: - async_log_exception(exp, dev_id, devices, hass) + async_log_schema_error(exp, dev_id, devices, hass) + async_notify_setup_error(hass, DOMAIN) else: result.append(Device(hass, **device)) return result diff --git a/homeassistant/components/device_tracker/services.yaml b/homeassistant/components/device_tracker/services.yaml index 08ccbcf0b5a..3199dfd8af1 100644 --- a/homeassistant/components/device_tracker/services.yaml +++ b/homeassistant/components/device_tracker/services.yaml @@ -25,9 +25,9 @@ see: gps_accuracy: 
selector: number: - min: 1 - max: 100 - unit_of_measurement: "%" + min: 0 + mode: box + unit_of_measurement: "m" battery: selector: number: diff --git a/homeassistant/components/devolo_home_network/__init__.py b/homeassistant/components/devolo_home_network/__init__.py index 0fee65d57b6..842d1bee40f 100644 --- a/homeassistant/components/devolo_home_network/__init__.py +++ b/homeassistant/components/devolo_home_network/__init__.py @@ -63,7 +63,8 @@ async def async_setup_entry( # noqa: C901 ) await device.async_connect(session_instance=async_client) device.password = entry.data.get( - CONF_PASSWORD, "" # This key was added in HA Core 2022.6 + CONF_PASSWORD, + "", # This key was added in HA Core 2022.6 ) except DeviceNotFound as err: raise ConfigEntryNotReady( diff --git a/homeassistant/components/directv/strings.json b/homeassistant/components/directv/strings.json index 8ed52cd3632..2c30e3db85c 100644 --- a/homeassistant/components/directv/strings.json +++ b/homeassistant/components/directv/strings.json @@ -8,6 +8,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your DirectTV device." 
} } }, diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 32f696a04ce..f21a03ef748 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -import pydiscovergy +from pydiscovergy import Discovergy from pydiscovergy.authentication import BasicAuth import pydiscovergy.error as discovergyError from pydiscovergy.models import Meter @@ -24,7 +24,7 @@ PLATFORMS = [Platform.SENSOR] class DiscovergyData: """Discovergy data class to share meters and api client.""" - api_client: pydiscovergy.Discovergy + api_client: Discovergy meters: list[Meter] coordinators: dict[str, DiscovergyUpdateCoordinator] @@ -35,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # init discovergy data class discovergy_data = DiscovergyData( - api_client=pydiscovergy.Discovergy( + api_client=Discovergy( email=entry.data[CONF_EMAIL], password=entry.data[CONF_PASSWORD], httpx_client=get_async_client(hass), diff --git a/homeassistant/components/discovergy/config_flow.py b/homeassistant/components/discovergy/config_flow.py index b3dee2d82a0..38a250a381d 100644 --- a/homeassistant/components/discovergy/config_flow.py +++ b/homeassistant/components/discovergy/config_flow.py @@ -5,7 +5,7 @@ from collections.abc import Mapping import logging from typing import Any -import pydiscovergy +from pydiscovergy import Discovergy from pydiscovergy.authentication import BasicAuth import pydiscovergy.error as discovergyError import voluptuous as vol @@ -70,7 +70,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): if user_input: try: - await pydiscovergy.Discovergy( + await Discovergy( email=user_input[CONF_EMAIL], password=user_input[CONF_PASSWORD], httpx_client=get_async_client(self.hass), diff --git a/homeassistant/components/discovergy/coordinator.py 
b/homeassistant/components/discovergy/coordinator.py index 5f27c6a43d2..5a3448a9e4b 100644 --- a/homeassistant/components/discovergy/coordinator.py +++ b/homeassistant/components/discovergy/coordinator.py @@ -12,17 +12,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DOMAIN - _LOGGER = logging.getLogger(__name__) class DiscovergyUpdateCoordinator(DataUpdateCoordinator[Reading]): """The Discovergy update coordinator.""" - discovergy_client: Discovergy - meter: Meter - def __init__( self, hass: HomeAssistant, @@ -36,7 +31,7 @@ class DiscovergyUpdateCoordinator(DataUpdateCoordinator[Reading]): super().__init__( hass, _LOGGER, - name=DOMAIN, + name=f"Discovergy meter {meter.meter_id}", update_interval=timedelta(seconds=30), ) diff --git a/homeassistant/components/discovergy/diagnostics.py b/homeassistant/components/discovergy/diagnostics.py index e0a9e47e6fd..75c6f97c701 100644 --- a/homeassistant/components/discovergy/diagnostics.py +++ b/homeassistant/components/discovergy/diagnostics.py @@ -4,8 +4,6 @@ from __future__ import annotations from dataclasses import asdict from typing import Any -from pydiscovergy.models import Meter - from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -30,9 +28,8 @@ async def async_get_config_entry_diagnostics( flattened_meter: list[dict] = [] last_readings: dict[str, dict] = {} data: DiscovergyData = hass.data[DOMAIN][entry.entry_id] - meters: list[Meter] = data.meters # always returns a list - for meter in meters: + for meter in data.meters: # make a dict of meter data and redact some data flattened_meter.append(async_redact_data(asdict(meter), TO_REDACT_METER)) diff --git a/homeassistant/components/discovergy/sensor.py 
b/homeassistant/components/discovergy/sensor.py index 0f5ace28dd7..ed878fbb82e 100644 --- a/homeassistant/components/discovergy/sensor.py +++ b/homeassistant/components/discovergy/sensor.py @@ -27,25 +27,25 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import DiscovergyData, DiscovergyUpdateCoordinator from .const import DOMAIN, MANUFACTURER -PARALLEL_UPDATES = 1 + +def _get_and_scale(reading: Reading, key: str, scale: int) -> datetime | float | None: + """Get a value from a Reading and divide with scale it.""" + if (value := reading.values.get(key)) is not None: + return value / scale + return None -@dataclass -class DiscovergyMixin: - """Mixin for alternative keys.""" +@dataclass(kw_only=True) +class DiscovergySensorEntityDescription(SensorEntityDescription): + """Class to describe a Discovergy sensor entity.""" value_fn: Callable[[Reading, str, int], datetime | float | None] = field( - default=lambda reading, key, scale: float(reading.values[key] / scale) + default=_get_and_scale ) alternative_keys: list[str] = field(default_factory=lambda: []) scale: int = field(default_factory=lambda: 1000) -@dataclass -class DiscovergySensorEntityDescription(DiscovergyMixin, SensorEntityDescription): - """Define Sensor entity description class.""" - - GAS_SENSORS: tuple[DiscovergySensorEntityDescription, ...] = ( DiscovergySensorEntityDescription( key="volume", @@ -166,37 +166,30 @@ async def async_setup_entry( ) -> None: """Set up the Discovergy sensors.""" data: DiscovergyData = hass.data[DOMAIN][entry.entry_id] - meters: list[Meter] = data.meters # always returns a list entities: list[DiscovergySensor] = [] - for meter in meters: - sensors = None - if meter.measurement_type == "ELECTRICITY": - sensors = ELECTRICITY_SENSORS - elif meter.measurement_type == "GAS": - sensors = GAS_SENSORS - + for meter in data.meters: + sensors: tuple[DiscovergySensorEntityDescription, ...] 
= () coordinator: DiscovergyUpdateCoordinator = data.coordinators[meter.meter_id] - if sensors is not None: - for description in sensors: - # check if this meter has this data, then add this sensor - for key in {description.key, *description.alternative_keys}: - if key in coordinator.data.values: - entities.append( - DiscovergySensor(key, description, meter, coordinator) - ) + # select sensor descriptions based on meter type and combine with additional sensors + if meter.measurement_type == "ELECTRICITY": + sensors = ELECTRICITY_SENSORS + ADDITIONAL_SENSORS + elif meter.measurement_type == "GAS": + sensors = GAS_SENSORS + ADDITIONAL_SENSORS - for description in ADDITIONAL_SENSORS: - entities.append( - DiscovergySensor(description.key, description, meter, coordinator) - ) + entities.extend( + DiscovergySensor(value_key, description, meter, coordinator) + for description in sensors + for value_key in {description.key, *description.alternative_keys} + if description.value_fn(coordinator.data, value_key, description.scale) + ) - async_add_entities(entities, False) + async_add_entities(entities) class DiscovergySensor(CoordinatorEntity[DiscovergyUpdateCoordinator], SensorEntity): - """Represents a discovergy smart meter sensor.""" + """Represents a Discovergy smart meter sensor.""" entity_description: DiscovergySensorEntityDescription data_key: str diff --git a/homeassistant/components/dlink/strings.json b/homeassistant/components/dlink/strings.json index 8c60d59fa6b..9f21a9571e9 100644 --- a/homeassistant/components/dlink/strings.json +++ b/homeassistant/components/dlink/strings.json @@ -9,6 +9,7 @@ "use_legacy_protocol": "Use legacy protocol" }, "data_description": { + "host": "The hostname or IP address of your D-Link device", "password": "Default: PIN code on the back." 
} }, diff --git a/homeassistant/components/dlna_dmr/media_player.py b/homeassistant/components/dlna_dmr/media_player.py index 3a57ba2c8ce..cd2f1ae2f50 100644 --- a/homeassistant/components/dlna_dmr/media_player.py +++ b/homeassistant/components/dlna_dmr/media_player.py @@ -453,10 +453,9 @@ class DlnaDmrEntity(MediaPlayerEntity): for state_variable in state_variables: # Force a state refresh when player begins or pauses playback # to update the position info. - if ( - state_variable.name == "TransportState" - and state_variable.value - in (TransportState.PLAYING, TransportState.PAUSED_PLAYBACK) + if state_variable.name == "TransportState" and state_variable.value in ( + TransportState.PLAYING, + TransportState.PAUSED_PLAYBACK, ): force_refresh = True diff --git a/homeassistant/components/doorbird/strings.json b/homeassistant/components/doorbird/strings.json index ceaf1a891ee..c851de379d4 100644 --- a/homeassistant/components/doorbird/strings.json +++ b/homeassistant/components/doorbird/strings.json @@ -17,8 +17,11 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "host": "[%key:common::config_flow::data::host%]", - "name": "Device Name", + "name": "Device name", "username": "[%key:common::config_flow::data::username%]" + }, + "data_description": { + "host": "The hostname or IP address of your DoorBird device." } } }, diff --git a/homeassistant/components/dremel_3d_printer/strings.json b/homeassistant/components/dremel_3d_printer/strings.json index 0016b8f2bca..9f6870b57f6 100644 --- a/homeassistant/components/dremel_3d_printer/strings.json +++ b/homeassistant/components/dremel_3d_printer/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Dremel 3D printer." 
} } }, diff --git a/homeassistant/components/dsmr/const.py b/homeassistant/components/dsmr/const.py index 5e1a54aedc4..ec0623a9ed6 100644 --- a/homeassistant/components/dsmr/const.py +++ b/homeassistant/components/dsmr/const.py @@ -29,6 +29,7 @@ DATA_TASK = "task" DEVICE_NAME_ELECTRICITY = "Electricity Meter" DEVICE_NAME_GAS = "Gas Meter" +DEVICE_NAME_WATER = "Water Meter" DSMR_VERSIONS = {"2.2", "4", "5", "5B", "5L", "5S", "Q3D"} diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index fa58bd8c5a6..0fa04dee489 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -34,6 +34,7 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import CoreState, Event, HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -57,6 +58,7 @@ from .const import ( DEFAULT_TIME_BETWEEN_UPDATE, DEVICE_NAME_ELECTRICITY, DEVICE_NAME_GAS, + DEVICE_NAME_WATER, DOMAIN, DSMR_PROTOCOL, LOGGER, @@ -67,21 +69,14 @@ EVENT_FIRST_TELEGRAM = "dsmr_first_telegram_{}" UNIT_CONVERSION = {"m3": UnitOfVolume.CUBIC_METERS} -@dataclass -class DSMRSensorEntityDescriptionMixin: - """Mixin for required keys.""" - - obis_reference: str - - -@dataclass -class DSMRSensorEntityDescription( - SensorEntityDescription, DSMRSensorEntityDescriptionMixin -): +@dataclass(kw_only=True) +class DSMRSensorEntityDescription(SensorEntityDescription): """Represents an DSMR Sensor.""" dsmr_versions: set[str] | None = None is_gas: bool = False + is_water: bool = False + obis_reference: str SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( @@ -90,7 +85,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] 
= ( translation_key="current_electricity_usage", obis_reference=obis_references.CURRENT_ELECTRICITY_USAGE, device_class=SensorDeviceClass.POWER, - force_update=True, state_class=SensorStateClass.MEASUREMENT, ), DSMRSensorEntityDescription( @@ -98,7 +92,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="current_electricity_delivery", obis_reference=obis_references.CURRENT_ELECTRICITY_DELIVERY, device_class=SensorDeviceClass.POWER, - force_update=True, state_class=SensorStateClass.MEASUREMENT, ), DSMRSensorEntityDescription( @@ -116,7 +109,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( obis_reference=obis_references.ELECTRICITY_USED_TARIFF_1, dsmr_versions={"2.2", "4", "5", "5B", "5L"}, device_class=SensorDeviceClass.ENERGY, - force_update=True, state_class=SensorStateClass.TOTAL_INCREASING, ), DSMRSensorEntityDescription( @@ -124,7 +116,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="electricity_used_tariff_2", obis_reference=obis_references.ELECTRICITY_USED_TARIFF_2, dsmr_versions={"2.2", "4", "5", "5B", "5L"}, - force_update=True, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -133,7 +124,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="electricity_delivered_tariff_1", obis_reference=obis_references.ELECTRICITY_DELIVERED_TARIFF_1, dsmr_versions={"2.2", "4", "5", "5B", "5L"}, - force_update=True, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -142,7 +132,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="electricity_delivered_tariff_2", obis_reference=obis_references.ELECTRICITY_DELIVERED_TARIFF_2, dsmr_versions={"2.2", "4", "5", "5B", "5L"}, - force_update=True, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -342,7 +331,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] 
= ( translation_key="electricity_imported_total", obis_reference=obis_references.ELECTRICITY_IMPORTED_TOTAL, dsmr_versions={"5L", "5S", "Q3D"}, - force_update=True, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -351,7 +339,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="electricity_exported_total", obis_reference=obis_references.ELECTRICITY_EXPORTED_TOTAL, dsmr_versions={"5L", "5S", "Q3D"}, - force_update=True, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -360,7 +347,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="current_average_demand", obis_reference=obis_references.BELGIUM_CURRENT_AVERAGE_DEMAND, dsmr_versions={"5B"}, - force_update=True, device_class=SensorDeviceClass.POWER, ), DSMRSensorEntityDescription( @@ -368,7 +354,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( translation_key="maximum_demand_current_month", obis_reference=obis_references.BELGIUM_MAXIMUM_DEMAND_MONTH, dsmr_versions={"5B"}, - force_update=True, device_class=SensorDeviceClass.POWER, ), DSMRSensorEntityDescription( @@ -377,7 +362,6 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( obis_reference=obis_references.HOURLY_GAS_METER_READING, dsmr_versions={"4", "5", "5L"}, is_gas=True, - force_update=True, device_class=SensorDeviceClass.GAS, state_class=SensorStateClass.TOTAL_INCREASING, ), @@ -387,36 +371,144 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] 
= ( obis_reference=obis_references.GAS_METER_READING, dsmr_versions={"2.2"}, is_gas=True, - force_update=True, device_class=SensorDeviceClass.GAS, state_class=SensorStateClass.TOTAL_INCREASING, ), ) -def add_gas_sensor_5B(telegram: dict[str, DSMRObject]) -> DSMRSensorEntityDescription: - """Return correct entity for 5B Gas meter.""" - ref = None - if obis_references.BELGIUM_MBUS1_METER_READING2 in telegram: - ref = obis_references.BELGIUM_MBUS1_METER_READING2 - elif obis_references.BELGIUM_MBUS2_METER_READING2 in telegram: - ref = obis_references.BELGIUM_MBUS2_METER_READING2 - elif obis_references.BELGIUM_MBUS3_METER_READING2 in telegram: - ref = obis_references.BELGIUM_MBUS3_METER_READING2 - elif obis_references.BELGIUM_MBUS4_METER_READING2 in telegram: - ref = obis_references.BELGIUM_MBUS4_METER_READING2 - elif ref is None: - ref = obis_references.BELGIUM_MBUS1_METER_READING2 - return DSMRSensorEntityDescription( - key="belgium_5min_gas_meter_reading", - translation_key="gas_meter_reading", - obis_reference=ref, - dsmr_versions={"5B"}, - is_gas=True, - force_update=True, - device_class=SensorDeviceClass.GAS, - state_class=SensorStateClass.TOTAL_INCREASING, - ) +def create_mbus_entity( + mbus: int, mtype: int, telegram: dict[str, DSMRObject] +) -> DSMRSensorEntityDescription | None: + """Create a new MBUS Entity.""" + if ( + mtype == 3 + and ( + obis_reference := getattr( + obis_references, f"BELGIUM_MBUS{mbus}_METER_READING2" + ) + ) + in telegram + ): + return DSMRSensorEntityDescription( + key=f"mbus{mbus}_gas_reading", + translation_key="gas_meter_reading", + obis_reference=obis_reference, + is_gas=True, + device_class=SensorDeviceClass.GAS, + state_class=SensorStateClass.TOTAL_INCREASING, + ) + if ( + mtype == 7 + and ( + obis_reference := getattr( + obis_references, f"BELGIUM_MBUS{mbus}_METER_READING1" + ) + ) + in telegram + ): + return DSMRSensorEntityDescription( + key=f"mbus{mbus}_water_reading", + translation_key="water_meter_reading", + 
obis_reference=obis_reference, + is_water=True, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + ) + return None + + +def device_class_and_uom( + telegram: dict[str, DSMRObject], + entity_description: DSMRSensorEntityDescription, +) -> tuple[SensorDeviceClass | None, str | None]: + """Get native unit of measurement from telegram,.""" + dsmr_object = telegram[entity_description.obis_reference] + uom: str | None = getattr(dsmr_object, "unit") or None + with suppress(ValueError): + if entity_description.device_class == SensorDeviceClass.GAS and ( + enery_uom := UnitOfEnergy(str(uom)) + ): + return (SensorDeviceClass.ENERGY, enery_uom) + if uom in UNIT_CONVERSION: + return (entity_description.device_class, UNIT_CONVERSION[uom]) + return (entity_description.device_class, uom) + + +def rename_old_gas_to_mbus( + hass: HomeAssistant, entry: ConfigEntry, mbus_device_id: str +) -> None: + """Rename old gas sensor to mbus variant.""" + dev_reg = dr.async_get(hass) + device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)}) + if device_entry_v1 is not None: + device_id = device_entry_v1.id + + ent_reg = er.async_get(hass) + entries = er.async_entries_for_device(ent_reg, device_id) + + for entity in entries: + if entity.unique_id.endswith("belgium_5min_gas_meter_reading"): + try: + ent_reg.async_update_entity( + entity.entity_id, + new_unique_id=mbus_device_id, + device_id=mbus_device_id, + ) + except ValueError: + LOGGER.debug( + "Skip migration of %s because it already exists", + entity.entity_id, + ) + else: + LOGGER.debug( + "Migrated entity %s from unique id %s to %s", + entity.entity_id, + entity.unique_id, + mbus_device_id, + ) + # Cleanup old device + dev_entities = er.async_entries_for_device( + ent_reg, device_id, include_disabled_entities=True + ) + if not dev_entities: + dev_reg.async_remove_device(device_id) + + +def create_mbus_entities( + hass: HomeAssistant, telegram: dict[str, DSMRObject], 
entry: ConfigEntry +) -> list[DSMREntity]: + """Create MBUS Entities.""" + entities = [] + for idx in range(1, 5): + if ( + device_type := getattr(obis_references, f"BELGIUM_MBUS{idx}_DEVICE_TYPE") + ) not in telegram: + continue + if (type_ := int(telegram[device_type].value)) not in (3, 7): + continue + if ( + identifier := getattr( + obis_references, + f"BELGIUM_MBUS{idx}_EQUIPMENT_IDENTIFIER", + ) + ) in telegram: + serial_ = telegram[identifier].value + rename_old_gas_to_mbus(hass, entry, serial_) + else: + serial_ = "" + if description := create_mbus_entity(idx, type_, telegram): + entities.append( + DSMREntity( + description, + entry, + telegram, + *device_class_and_uom(telegram, description), # type: ignore[arg-type] + serial_, + idx, + ) + ) + return entities async def async_setup_entry( @@ -436,25 +528,10 @@ async def async_setup_entry( add_entities_handler() add_entities_handler = None - def device_class_and_uom( - telegram: dict[str, DSMRObject], - entity_description: DSMRSensorEntityDescription, - ) -> tuple[SensorDeviceClass | None, str | None]: - """Get native unit of measurement from telegram,.""" - dsmr_object = telegram[entity_description.obis_reference] - uom: str | None = getattr(dsmr_object, "unit") or None - with suppress(ValueError): - if entity_description.device_class == SensorDeviceClass.GAS and ( - enery_uom := UnitOfEnergy(str(uom)) - ): - return (SensorDeviceClass.ENERGY, enery_uom) - if uom in UNIT_CONVERSION: - return (entity_description.device_class, UNIT_CONVERSION[uom]) - return (entity_description.device_class, uom) - - all_sensors = SENSORS if dsmr_version == "5B": - all_sensors += (add_gas_sensor_5B(telegram),) + mbus_entities = create_mbus_entities(hass, telegram, entry) + for mbus_entity in mbus_entities: + entities.append(mbus_entity) entities.extend( [ @@ -462,11 +539,9 @@ async def async_setup_entry( description, entry, telegram, - *device_class_and_uom( - telegram, description - ), # type: ignore[arg-type] + 
*device_class_and_uom(telegram, description), # type: ignore[arg-type] ) - for description in all_sensors + for description in SENSORS if ( description.dsmr_versions is None or dsmr_version in description.dsmr_versions @@ -641,6 +716,8 @@ class DSMREntity(SensorEntity): telegram: dict[str, DSMRObject], device_class: SensorDeviceClass, native_unit_of_measurement: str | None, + serial_id: str = "", + mbus_id: int = 0, ) -> None: """Initialize entity.""" self.entity_description = entity_description @@ -652,8 +729,15 @@ class DSMREntity(SensorEntity): device_serial = entry.data[CONF_SERIAL_ID] device_name = DEVICE_NAME_ELECTRICITY if entity_description.is_gas: - device_serial = entry.data[CONF_SERIAL_ID_GAS] + if serial_id: + device_serial = serial_id + else: + device_serial = entry.data[CONF_SERIAL_ID_GAS] device_name = DEVICE_NAME_GAS + if entity_description.is_water: + if serial_id: + device_serial = serial_id + device_name = DEVICE_NAME_WATER if device_serial is None: device_serial = entry.entry_id @@ -661,7 +745,13 @@ class DSMREntity(SensorEntity): identifiers={(DOMAIN, device_serial)}, name=device_name, ) - self._attr_unique_id = f"{device_serial}_{entity_description.key}" + if mbus_id != 0: + if serial_id: + self._attr_unique_id = f"{device_serial}" + else: + self._attr_unique_id = f"{device_serial}_{mbus_id}" + else: + self._attr_unique_id = f"{device_serial}_{entity_description.key}" @callback def update_data(self, telegram: dict[str, DSMRObject] | None) -> None: @@ -709,6 +799,10 @@ class DSMREntity(SensorEntity): float(value), self._entry.data.get(CONF_PRECISION, DEFAULT_PRECISION) ) + # Make sure we do not return a zero value for an energy sensor + if not value and self.state_class == SensorStateClass.TOTAL_INCREASING: + return None + return value @staticmethod diff --git a/homeassistant/components/dsmr/strings.json b/homeassistant/components/dsmr/strings.json index 5f0568e2905..055c0c41264 100644 --- a/homeassistant/components/dsmr/strings.json +++ 
b/homeassistant/components/dsmr/strings.json @@ -147,6 +147,9 @@ }, "voltage_swell_l3_count": { "name": "Voltage swells phase L3" + }, + "water_meter_reading": { + "name": "Water consumption" } } }, diff --git a/homeassistant/components/dunehd/strings.json b/homeassistant/components/dunehd/strings.json index f7e12b39f16..7d60a720a98 100644 --- a/homeassistant/components/dunehd/strings.json +++ b/homeassistant/components/dunehd/strings.json @@ -5,6 +5,9 @@ "description": "Ensure that your player is turned on.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Dune HD device." } } }, diff --git a/homeassistant/components/duotecno/strings.json b/homeassistant/components/duotecno/strings.json index 93a545d31dc..a5585c3dd2c 100644 --- a/homeassistant/components/duotecno/strings.json +++ b/homeassistant/components/duotecno/strings.json @@ -6,6 +6,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Duotecno device." 
} } }, diff --git a/homeassistant/components/easyenergy/manifest.json b/homeassistant/components/easyenergy/manifest.json index 5755a1b3dbe..6fa177c7221 100644 --- a/homeassistant/components/easyenergy/manifest.json +++ b/homeassistant/components/easyenergy/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/easyenergy", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["easyenergy==0.3.0"] + "requirements": ["easyenergy==1.0.0"] } diff --git a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index e1253b585ac..1b0e65f7390 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -99,6 +99,7 @@ ECOBEE_HVAC_ACTION_TO_HASS = { "economizer": HVACAction.FAN, "compHotWater": None, "auxHotWater": None, + "compWaterHeater": None, } PRESET_TO_ECOBEE_HOLD = { diff --git a/homeassistant/components/ecobee/manifest.json b/homeassistant/components/ecobee/manifest.json index ffb7fe8adfe..1160cd946d9 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -1,7 +1,7 @@ { "domain": "ecobee", "name": "ecobee", - "codeowners": ["@marthoc", "@marcolivierarsenault"], + "codeowners": ["@marcolivierarsenault"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ecobee", "homekit": { diff --git a/homeassistant/components/ecoforest/strings.json b/homeassistant/components/ecoforest/strings.json index d1767be5cda..1094e10ada3 100644 --- a/homeassistant/components/ecoforest/strings.json +++ b/homeassistant/components/ecoforest/strings.json @@ -6,6 +6,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Ecoforest device." 
} } }, diff --git a/homeassistant/components/econet/__init__.py b/homeassistant/components/econet/__init__.py index 36cdeb68821..67cbd7496e3 100644 --- a/homeassistant/components/econet/__init__.py +++ b/homeassistant/components/econet/__init__.py @@ -1,4 +1,5 @@ """Support for EcoNet products.""" +import asyncio from datetime import timedelta import logging @@ -80,14 +81,11 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b await hass.async_add_executor_job(api.unsubscribe) api.subscribe() - async def fetch_update(now): - """Fetch the latest changes from the API.""" + # Refresh values + await asyncio.sleep(60) await api.refresh_equipment() config_entry.async_on_unload(async_track_time_interval(hass, resubscribe, INTERVAL)) - config_entry.async_on_unload( - async_track_time_interval(hass, fetch_update, INTERVAL + timedelta(minutes=1)) - ) return True diff --git a/homeassistant/components/econet/climate.py b/homeassistant/components/econet/climate.py index e77c4face74..f5328da4776 100644 --- a/homeassistant/components/econet/climate.py +++ b/homeassistant/components/econet/climate.py @@ -64,6 +64,7 @@ async def async_setup_entry( class EcoNetThermostat(EcoNetEntity, ClimateEntity): """Define an Econet thermostat.""" + _attr_should_poll = True _attr_temperature_unit = UnitOfTemperature.FAHRENHEIT def __init__(self, thermostat): diff --git a/homeassistant/components/econet/water_heater.py b/homeassistant/components/econet/water_heater.py index cbaf4551d03..a99ab087729 100644 --- a/homeassistant/components/econet/water_heater.py +++ b/homeassistant/components/econet/water_heater.py @@ -1,4 +1,5 @@ """Support for Rheem EcoNet water heaters.""" +from datetime import timedelta import logging from typing import Any @@ -17,12 +18,14 @@ from homeassistant.components.water_heater import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, UnitOfTemperature -from 
homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import EcoNetEntity from .const import DOMAIN, EQUIPMENT +SCAN_INTERVAL = timedelta(hours=1) + _LOGGER = logging.getLogger(__name__) ECONET_STATE_TO_HA = { @@ -52,6 +55,7 @@ async def async_setup_entry( EcoNetWaterHeater(water_heater) for water_heater in equipment[EquipmentType.WATER_HEATER] ], + update_before_add=True, ) @@ -64,18 +68,8 @@ class EcoNetWaterHeater(EcoNetEntity, WaterHeaterEntity): def __init__(self, water_heater): """Initialize.""" super().__init__(water_heater) - self._running = water_heater.running self.water_heater = water_heater - @callback - def on_update_received(self): - """Update was pushed from the econet API.""" - if self._running != self.water_heater.running: - # Water heater running state has changed so check usage on next update - self._attr_should_poll = True - self._running = self.water_heater.running - self.async_write_ha_state() - @property def is_away_mode_on(self): """Return true if away mode is on.""" @@ -153,8 +147,6 @@ class EcoNetWaterHeater(EcoNetEntity, WaterHeaterEntity): """Get the latest energy usage.""" await self.water_heater.get_energy_usage() await self.water_heater.get_water_usage() - self.async_write_ha_state() - self._attr_should_poll = False def turn_away_mode_on(self) -> None: """Turn away mode on.""" diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index b05cd532c16..7a69db24012 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -23,20 +23,13 @@ from .coordinator import ElgatoDataUpdateCoordinator from .entity import ElgatoEntity -@dataclass -class ElgatoButtonEntityDescriptionMixin: - """Mixin values for Elgato entities.""" +@dataclass(kw_only=True) +class ElgatoButtonEntityDescription(ButtonEntityDescription): + """Class describing 
Elgato button entities.""" press_fn: Callable[[Elgato], Awaitable[Any]] -@dataclass -class ElgatoButtonEntityDescription( - ButtonEntityDescription, ElgatoButtonEntityDescriptionMixin -): - """Class describing Elgato button entities.""" - - BUTTONS = [ ElgatoButtonEntityDescription( key="identify", diff --git a/homeassistant/components/elgato/manifest.json b/homeassistant/components/elgato/manifest.json index 033a2567bb4..0671a7adb1d 100644 --- a/homeassistant/components/elgato/manifest.json +++ b/homeassistant/components/elgato/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["elgato==5.1.0"], + "requirements": ["elgato==5.1.1"], "zeroconf": ["_elg._tcp.local."] } diff --git a/homeassistant/components/elgato/sensor.py b/homeassistant/components/elgato/sensor.py index 8ed8265705c..27dedee25c9 100644 --- a/homeassistant/components/elgato/sensor.py +++ b/homeassistant/components/elgato/sensor.py @@ -26,20 +26,12 @@ from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity -@dataclass -class ElgatoEntityDescriptionMixin: - """Mixin values for Elgato entities.""" - - value_fn: Callable[[ElgatoData], float | int | None] - - -@dataclass -class ElgatoSensorEntityDescription( - SensorEntityDescription, ElgatoEntityDescriptionMixin -): +@dataclass(kw_only=True) +class ElgatoSensorEntityDescription(SensorEntityDescription): """Class describing Elgato sensor entities.""" has_fn: Callable[[ElgatoData], bool] = lambda _: True + value_fn: Callable[[ElgatoData], float | int | None] SENSORS = [ diff --git a/homeassistant/components/elgato/strings.json b/homeassistant/components/elgato/strings.json index e6b16215793..6e1031c8ddf 100644 --- a/homeassistant/components/elgato/strings.json +++ b/homeassistant/components/elgato/strings.json @@ -7,6 +7,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": 
"[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Elgato device." } }, "zeroconf_confirm": { diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index 78af3adfa53..e9ab506c3a4 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -19,21 +19,13 @@ from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity -@dataclass -class ElgatoEntityDescriptionMixin: - """Mixin values for Elgato entities.""" - - is_on_fn: Callable[[ElgatoData], bool | None] - set_fn: Callable[[Elgato, bool], Awaitable[Any]] - - -@dataclass -class ElgatoSwitchEntityDescription( - SwitchEntityDescription, ElgatoEntityDescriptionMixin -): +@dataclass(kw_only=True) +class ElgatoSwitchEntityDescription(SwitchEntityDescription): """Class describing Elgato switch entities.""" has_fn: Callable[[ElgatoData], bool] = lambda _: True + is_on_fn: Callable[[ElgatoData], bool | None] + set_fn: Callable[[Elgato, bool], Awaitable[Any]] SWITCHES = [ diff --git a/homeassistant/components/elmax/cover.py b/homeassistant/components/elmax/cover.py index 8a6acb154aa..e05b17b9171 100644 --- a/homeassistant/components/elmax/cover.py +++ b/homeassistant/components/elmax/cover.py @@ -18,13 +18,11 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -_COMMAND_BY_MOTION_STATUS = ( - { # Maps the stop command to use for every cover motion status - CoverStatus.DOWN: CoverCommand.DOWN, - CoverStatus.UP: CoverCommand.UP, - CoverStatus.IDLE: None, - } -) +_COMMAND_BY_MOTION_STATUS = { # Maps the stop command to use for every cover motion status + CoverStatus.DOWN: CoverCommand.DOWN, + CoverStatus.UP: CoverCommand.UP, + CoverStatus.IDLE: None, +} async def async_setup_entry( diff --git a/homeassistant/components/emonitor/strings.json b/homeassistant/components/emonitor/strings.json index 675db107935..08ffe030890 
100644 --- a/homeassistant/components/emonitor/strings.json +++ b/homeassistant/components/emonitor/strings.json @@ -5,6 +5,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your SiteSage Emonitor device." } }, "confirm": { diff --git a/homeassistant/components/emulated_hue/__init__.py b/homeassistant/components/emulated_hue/__init__.py index a98d2c08a48..1ba93da716c 100644 --- a/homeassistant/components/emulated_hue/__init__.py +++ b/homeassistant/components/emulated_hue/__init__.py @@ -6,7 +6,6 @@ import logging from aiohttp import web import voluptuous as vol -from homeassistant.components.http import HomeAssistantAccessLogger from homeassistant.components.network import async_get_source_ip from homeassistant.const import ( CONF_ENTITIES, @@ -101,7 +100,7 @@ async def start_emulated_hue_bridge( config.advertise_port or config.listen_port, ) - runner = web.AppRunner(app, access_log_class=HomeAssistantAccessLogger) + runner = web.AppRunner(app) await runner.setup() site = web.TCPSite(runner, config.host_ip_addr, config.listen_port) diff --git a/homeassistant/components/emulated_hue/hue_api.py b/homeassistant/components/emulated_hue/hue_api.py index 6dfd49c371c..ad6b0541cd6 100644 --- a/homeassistant/components/emulated_hue/hue_api.py +++ b/homeassistant/components/emulated_hue/hue_api.py @@ -676,19 +676,20 @@ def get_entity_state_dict(config: Config, entity: State) -> dict[str, Any]: @lru_cache(maxsize=512) def _build_entity_state_dict(entity: State) -> dict[str, Any]: """Build a state dict for an entity.""" + is_on = entity.state != STATE_OFF data: dict[str, Any] = { - STATE_ON: entity.state != STATE_OFF, + STATE_ON: is_on, STATE_BRIGHTNESS: None, STATE_HUE: None, STATE_SATURATION: None, STATE_COLOR_TEMP: None, } - if data[STATE_ON]: + attributes = entity.attributes + if is_on: data[STATE_BRIGHTNESS] = hass_to_hue_brightness( - entity.attributes.get(ATTR_BRIGHTNESS, 0) 
+ attributes.get(ATTR_BRIGHTNESS) or 0 ) - hue_sat = entity.attributes.get(ATTR_HS_COLOR) - if hue_sat is not None: + if (hue_sat := attributes.get(ATTR_HS_COLOR)) is not None: hue = hue_sat[0] sat = hue_sat[1] # Convert hass hs values back to hue hs values @@ -697,7 +698,7 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]: else: data[STATE_HUE] = HUE_API_STATE_HUE_MIN data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN - data[STATE_COLOR_TEMP] = entity.attributes.get(ATTR_COLOR_TEMP, 0) + data[STATE_COLOR_TEMP] = attributes.get(ATTR_COLOR_TEMP) or 0 else: data[STATE_BRIGHTNESS] = 0 @@ -706,25 +707,23 @@ def _build_entity_state_dict(entity: State) -> dict[str, Any]: data[STATE_COLOR_TEMP] = 0 if entity.domain == climate.DOMAIN: - temperature = entity.attributes.get(ATTR_TEMPERATURE, 0) + temperature = attributes.get(ATTR_TEMPERATURE, 0) # Convert 0-100 to 0-254 data[STATE_BRIGHTNESS] = round(temperature * HUE_API_STATE_BRI_MAX / 100) elif entity.domain == humidifier.DOMAIN: - humidity = entity.attributes.get(ATTR_HUMIDITY, 0) + humidity = attributes.get(ATTR_HUMIDITY, 0) # Convert 0-100 to 0-254 data[STATE_BRIGHTNESS] = round(humidity * HUE_API_STATE_BRI_MAX / 100) elif entity.domain == media_player.DOMAIN: - level = entity.attributes.get( - ATTR_MEDIA_VOLUME_LEVEL, 1.0 if data[STATE_ON] else 0.0 - ) + level = attributes.get(ATTR_MEDIA_VOLUME_LEVEL, 1.0 if is_on else 0.0) # Convert 0.0-1.0 to 0-254 data[STATE_BRIGHTNESS] = round(min(1.0, level) * HUE_API_STATE_BRI_MAX) elif entity.domain == fan.DOMAIN: - percentage = entity.attributes.get(ATTR_PERCENTAGE) or 0 + percentage = attributes.get(ATTR_PERCENTAGE) or 0 # Convert 0-100 to 0-254 data[STATE_BRIGHTNESS] = round(percentage * HUE_API_STATE_BRI_MAX / 100) elif entity.domain == cover.DOMAIN: - level = entity.attributes.get(ATTR_CURRENT_POSITION, 0) + level = attributes.get(ATTR_CURRENT_POSITION, 0) data[STATE_BRIGHTNESS] = round(level / 100 * HUE_API_STATE_BRI_MAX) _clamp_values(data) return data @@ 
-773,7 +772,9 @@ def state_to_json(config: Config, state: State) -> dict[str, Any]: "swversion": "123", } - if light.color_supported(color_modes) and light.color_temp_supported(color_modes): + color_supported = light.color_supported(color_modes) + color_temp_supported = light.color_temp_supported(color_modes) + if color_supported and color_temp_supported: # Extended Color light (Zigbee Device ID: 0x0210) # Same as Color light, but which supports additional setting of color temperature retval["type"] = "Extended color light" @@ -791,7 +792,7 @@ def state_to_json(config: Config, state: State) -> dict[str, Any]: json_state[HUE_API_STATE_COLORMODE] = "hs" else: json_state[HUE_API_STATE_COLORMODE] = "ct" - elif light.color_supported(color_modes): + elif color_supported: # Color light (Zigbee Device ID: 0x0200) # Supports on/off, dimming and color control (hue/saturation, enhanced hue, color loop and XY) retval["type"] = "Color light" @@ -805,7 +806,7 @@ def state_to_json(config: Config, state: State) -> dict[str, Any]: HUE_API_STATE_EFFECT: "none", } ) - elif light.color_temp_supported(color_modes): + elif color_temp_supported: # Color temperature light (Zigbee Device ID: 0x0220) # Supports groups, scenes, on/off, dimming, and setting of a color temperature retval["type"] = "Color temperature light" diff --git a/homeassistant/components/energyzero/manifest.json b/homeassistant/components/energyzero/manifest.json index 8e2b8aba894..9ef99173ffb 100644 --- a/homeassistant/components/energyzero/manifest.json +++ b/homeassistant/components/energyzero/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/energyzero", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["energyzero==0.5.0"] + "requirements": ["energyzero==1.0.0"] } diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index 718c33d2811..c49e1f143e6 100644 --- 
a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -1,12 +1,12 @@ { "domain": "enphase_envoy", "name": "Enphase Envoy", - "codeowners": ["@bdraco", "@cgarwood", "@dgomes", "@joostlek"], + "codeowners": ["@bdraco", "@cgarwood", "@dgomes", "@joostlek", "@catsmanac"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.14.2"], + "requirements": ["pyenphase==1.14.3"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index 94cf9233745..fe32002e6b2 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -8,6 +8,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Enphase Envoy gateway." 
} } }, diff --git a/homeassistant/components/envisalink/manifest.json b/homeassistant/components/envisalink/manifest.json index c048687c906..093ebf77eba 100644 --- a/homeassistant/components/envisalink/manifest.json +++ b/homeassistant/components/envisalink/manifest.json @@ -1,7 +1,7 @@ { "domain": "envisalink", "name": "Envisalink", - "codeowners": ["@ufodone"], + "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/envisalink", "iot_class": "local_push", "loggers": ["pyenvisalink"], diff --git a/homeassistant/components/epson/media_player.py b/homeassistant/components/epson/media_player.py index 1f80be9fe06..1f401ed0a7d 100644 --- a/homeassistant/components/epson/media_player.py +++ b/homeassistant/components/epson/media_player.py @@ -37,7 +37,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import ( + DeviceInfo, + async_get as async_get_device_registry, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_registry import async_get as async_get_entity_registry @@ -55,8 +58,7 @@ async def async_setup_entry( projector: Projector = hass.data[DOMAIN][config_entry.entry_id] projector_entity = EpsonProjectorMediaPlayer( projector=projector, - name=config_entry.title, - unique_id=config_entry.unique_id, + unique_id=config_entry.unique_id or config_entry.entry_id, entry=config_entry, ) async_add_entities([projector_entity], True) @@ -71,6 +73,9 @@ async def async_setup_entry( class EpsonProjectorMediaPlayer(MediaPlayerEntity): """Representation of Epson Projector Device.""" + _attr_has_entity_name = True + _attr_name = None + _attr_supported_features = ( MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF @@ -82,38 
+87,38 @@ class EpsonProjectorMediaPlayer(MediaPlayerEntity): ) def __init__( - self, projector: Projector, name: str, unique_id: str | None, entry: ConfigEntry + self, projector: Projector, unique_id: str, entry: ConfigEntry ) -> None: """Initialize entity to control Epson projector.""" self._projector = projector self._entry = entry - self._attr_name = name self._attr_available = False self._cmode = None self._attr_source_list = list(DEFAULT_SOURCES.values()) self._attr_unique_id = unique_id - if unique_id: - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, - manufacturer="Epson", - model="Epson", - name="Epson projector", - via_device=(DOMAIN, unique_id), - ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="Epson", + model="Epson", + ) async def set_unique_id(self) -> bool: """Set unique id for projector config entry.""" _LOGGER.debug("Setting unique_id for projector") - if self.unique_id: + if self._entry.unique_id: return False if uid := await self._projector.get_serial_number(): self.hass.config_entries.async_update_entry(self._entry, unique_id=uid) - registry = async_get_entity_registry(self.hass) - old_entity_id = registry.async_get_entity_id( + ent_reg = async_get_entity_registry(self.hass) + old_entity_id = ent_reg.async_get_entity_id( "media_player", DOMAIN, self._entry.entry_id ) if old_entity_id is not None: - registry.async_update_entity(old_entity_id, new_unique_id=uid) + ent_reg.async_update_entity(old_entity_id, new_unique_id=uid) + dev_reg = async_get_device_registry(self.hass) + device = dev_reg.async_get_device({(DOMAIN, self._entry.entry_id)}) + if device is not None: + dev_reg.async_update_device(device.id, new_identifiers={(DOMAIN, uid)}) self.hass.async_create_task( self.hass.config_entries.async_reload(self._entry.entry_id) ) diff --git a/homeassistant/components/epson/strings.json b/homeassistant/components/epson/strings.json index 4e3780322e9..94544c32d1d 100644 --- 
a/homeassistant/components/epson/strings.json +++ b/homeassistant/components/epson/strings.json @@ -5,6 +5,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]" + }, + "data_description": { + "host": "The hostname or IP address of your Epson projector." } } }, diff --git a/homeassistant/components/eq3btsmart/__init__.py b/homeassistant/components/eq3btsmart/__init__.py deleted file mode 100644 index f32eba6944f..00000000000 --- a/homeassistant/components/eq3btsmart/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The eq3btsmart component.""" diff --git a/homeassistant/components/eq3btsmart/climate.py b/homeassistant/components/eq3btsmart/climate.py deleted file mode 100644 index 700bc61293f..00000000000 --- a/homeassistant/components/eq3btsmart/climate.py +++ /dev/null @@ -1,192 +0,0 @@ -"""Support for eQ-3 Bluetooth Smart thermostats.""" -from __future__ import annotations - -import logging -from typing import Any - -import eq3bt as eq3 -import voluptuous as vol - -from homeassistant.components.climate import ( - PLATFORM_SCHEMA, - PRESET_AWAY, - PRESET_BOOST, - PRESET_NONE, - ClimateEntity, - ClimateEntityFeature, - HVACMode, -) -from homeassistant.const import ( - ATTR_TEMPERATURE, - CONF_DEVICES, - CONF_MAC, - PRECISION_HALVES, - UnitOfTemperature, -) -from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import format_mac -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from .const import PRESET_CLOSED, PRESET_NO_HOLD, PRESET_OPEN, PRESET_PERMANENT_HOLD - -_LOGGER = logging.getLogger(__name__) - -STATE_BOOST = "boost" - -ATTR_STATE_WINDOW_OPEN = "window_open" -ATTR_STATE_VALVE = "valve" -ATTR_STATE_LOCKED = "is_locked" -ATTR_STATE_LOW_BAT = "low_battery" -ATTR_STATE_AWAY_END = "away_end" - -EQ_TO_HA_HVAC = { - 
eq3.Mode.Open: HVACMode.HEAT, - eq3.Mode.Closed: HVACMode.OFF, - eq3.Mode.Auto: HVACMode.AUTO, - eq3.Mode.Manual: HVACMode.HEAT, - eq3.Mode.Boost: HVACMode.AUTO, - eq3.Mode.Away: HVACMode.HEAT, -} - -HA_TO_EQ_HVAC = { - HVACMode.HEAT: eq3.Mode.Manual, - HVACMode.OFF: eq3.Mode.Closed, - HVACMode.AUTO: eq3.Mode.Auto, -} - -EQ_TO_HA_PRESET = { - eq3.Mode.Boost: PRESET_BOOST, - eq3.Mode.Away: PRESET_AWAY, - eq3.Mode.Manual: PRESET_PERMANENT_HOLD, - eq3.Mode.Auto: PRESET_NO_HOLD, - eq3.Mode.Open: PRESET_OPEN, - eq3.Mode.Closed: PRESET_CLOSED, -} - -HA_TO_EQ_PRESET = { - PRESET_BOOST: eq3.Mode.Boost, - PRESET_AWAY: eq3.Mode.Away, - PRESET_PERMANENT_HOLD: eq3.Mode.Manual, - PRESET_NO_HOLD: eq3.Mode.Auto, - PRESET_OPEN: eq3.Mode.Open, - PRESET_CLOSED: eq3.Mode.Closed, -} - - -DEVICE_SCHEMA = vol.Schema({vol.Required(CONF_MAC): cv.string}) - -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - {vol.Required(CONF_DEVICES): vol.Schema({cv.string: DEVICE_SCHEMA})} -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the eQ-3 BLE thermostats.""" - devices = [] - - for name, device_cfg in config[CONF_DEVICES].items(): - mac = device_cfg[CONF_MAC] - devices.append(EQ3BTSmartThermostat(mac, name)) - - add_entities(devices, True) - - -class EQ3BTSmartThermostat(ClimateEntity): - """Representation of an eQ-3 Bluetooth Smart thermostat.""" - - _attr_hvac_modes = list(HA_TO_EQ_HVAC) - _attr_precision = PRECISION_HALVES - _attr_preset_modes = list(HA_TO_EQ_PRESET) - _attr_supported_features = ( - ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE - ) - _attr_temperature_unit = UnitOfTemperature.CELSIUS - - def __init__(self, mac: str, name: str) -> None: - """Initialize the thermostat.""" - # We want to avoid name clash with this module. 
- self._attr_name = name - self._attr_unique_id = format_mac(mac) - self._thermostat = eq3.Thermostat(mac) - - @property - def available(self) -> bool: - """Return if thermostat is available.""" - return self._thermostat.mode >= 0 - - @property - def current_temperature(self): - """Can not report temperature, so return target_temperature.""" - return self.target_temperature - - @property - def target_temperature(self): - """Return the temperature we try to reach.""" - return self._thermostat.target_temperature - - def set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: - return - self._thermostat.target_temperature = temperature - - @property - def hvac_mode(self) -> HVACMode: - """Return the current operation mode.""" - if self._thermostat.mode < 0: - return HVACMode.OFF - return EQ_TO_HA_HVAC[self._thermostat.mode] - - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set operation mode.""" - self._thermostat.mode = HA_TO_EQ_HVAC[hvac_mode] - - @property - def min_temp(self): - """Return the minimum temperature.""" - return self._thermostat.min_temp - - @property - def max_temp(self): - """Return the maximum temperature.""" - return self._thermostat.max_temp - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the device specific state attributes.""" - return { - ATTR_STATE_AWAY_END: self._thermostat.away_end, - ATTR_STATE_LOCKED: self._thermostat.locked, - ATTR_STATE_LOW_BAT: self._thermostat.low_battery, - ATTR_STATE_VALVE: self._thermostat.valve_state, - ATTR_STATE_WINDOW_OPEN: self._thermostat.window_open, - } - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode, e.g., home, away, temp. - - Requires ClimateEntityFeature.PRESET_MODE. 
- """ - return EQ_TO_HA_PRESET.get(self._thermostat.mode) - - def set_preset_mode(self, preset_mode: str) -> None: - """Set new preset mode.""" - if preset_mode == PRESET_NONE: - self.set_hvac_mode(HVACMode.HEAT) - self._thermostat.mode = HA_TO_EQ_PRESET[preset_mode] - - def update(self) -> None: - """Update the data from the thermostat.""" - - try: - self._thermostat.update() - except eq3.BackendException as ex: - _LOGGER.warning("Updating the state failed: %s", ex) diff --git a/homeassistant/components/eq3btsmart/const.py b/homeassistant/components/eq3btsmart/const.py deleted file mode 100644 index af90acbde55..00000000000 --- a/homeassistant/components/eq3btsmart/const.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Constants for EQ3 Bluetooth Smart Radiator Valves.""" - -PRESET_PERMANENT_HOLD = "permanent_hold" -PRESET_NO_HOLD = "no_hold" -PRESET_OPEN = "open" -PRESET_CLOSED = "closed" diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json deleted file mode 100644 index 8a976b25c7a..00000000000 --- a/homeassistant/components/eq3btsmart/manifest.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "domain": "eq3btsmart", - "name": "eQ-3 Bluetooth Smart Thermostats", - "codeowners": ["@rytilahti"], - "dependencies": ["bluetooth_adapters"], - "documentation": "https://www.home-assistant.io/integrations/eq3btsmart", - "iot_class": "local_polling", - "loggers": ["bleak", "eq3bt"], - "requirements": ["construct==2.10.68", "python-eq3bt==0.2"] -} diff --git a/homeassistant/components/esphome/bluetooth/__init__.py b/homeassistant/components/esphome/bluetooth/__init__.py index 9ef298145d3..6936afac714 100644 --- a/homeassistant/components/esphome/bluetooth/__init__.py +++ b/homeassistant/components/esphome/bluetooth/__init__.py @@ -1,8 +1,11 @@ """Bluetooth support for esphome.""" from __future__ import annotations +import asyncio +from collections.abc import Coroutine from functools import partial import logging +from typing import 
Any from aioesphomeapi import APIClient, BluetoothProxyFeature @@ -43,6 +46,13 @@ def _async_can_connect( return can_connect +@hass_callback +def _async_unload(unload_callbacks: list[CALLBACK_TYPE]) -> None: + """Cancel all the callbacks on unload.""" + for callback in unload_callbacks: + callback() + + async def async_connect_scanner( hass: HomeAssistant, entry: ConfigEntry, @@ -92,27 +102,36 @@ async def async_connect_scanner( hass, source, entry.title, new_info_callback, connector, connectable ) client_data.scanner = scanner + coros: list[Coroutine[Any, Any, CALLBACK_TYPE]] = [] + # These calls all return a callback that can be used to unsubscribe + # but we never unsubscribe so we don't care about the return value + if connectable: # If its connectable be sure not to register the scanner # until we know the connection is fully setup since otherwise # there is a race condition where the connection can fail - await cli.subscribe_bluetooth_connections_free( - bluetooth_device.async_update_ble_connection_limits + coros.append( + cli.subscribe_bluetooth_connections_free( + bluetooth_device.async_update_ble_connection_limits + ) ) - unload_callbacks = [ - async_register_scanner(hass, scanner, connectable), - scanner.async_setup(), - ] + if feature_flags & BluetoothProxyFeature.RAW_ADVERTISEMENTS: - await cli.subscribe_bluetooth_le_raw_advertisements( - scanner.async_on_raw_advertisements + coros.append( + cli.subscribe_bluetooth_le_raw_advertisements( + scanner.async_on_raw_advertisements + ) ) else: - await cli.subscribe_bluetooth_le_advertisements(scanner.async_on_advertisement) + coros.append( + cli.subscribe_bluetooth_le_advertisements(scanner.async_on_advertisement) + ) - @hass_callback - def _async_unload() -> None: - for callback in unload_callbacks: - callback() - - return _async_unload + await asyncio.gather(*coros) + return partial( + _async_unload, + [ + async_register_scanner(hass, scanner, connectable), + scanner.async_setup(), + ], + ) diff --git 
a/homeassistant/components/esphome/bluetooth/client.py b/homeassistant/components/esphome/bluetooth/client.py index 970e866b27b..96f1bce686a 100644 --- a/homeassistant/components/esphome/bluetooth/client.py +++ b/homeassistant/components/esphome/bluetooth/client.py @@ -22,6 +22,7 @@ from aioesphomeapi import ( APIClient, APIVersion, BLEConnectionError, + BluetoothConnectionDroppedError, BluetoothProxyFeature, DeviceInfo, ) @@ -30,7 +31,6 @@ from aioesphomeapi.core import ( BluetoothGATTAPIError, TimeoutAPIError, ) -from async_interrupt import interrupt from bleak.backends.characteristic import BleakGATTCharacteristic from bleak.backends.client import BaseBleakClient, NotifyCallback from bleak.backends.device import BLEDevice @@ -68,41 +68,25 @@ def mac_to_int(address: str) -> int: return int(address.replace(":", ""), 16) -def verify_connected(func: _WrapFuncType) -> _WrapFuncType: - """Define a wrapper throw BleakError if not connected.""" - - async def _async_wrap_bluetooth_connected_operation( - self: ESPHomeClient, *args: Any, **kwargs: Any - ) -> Any: - # pylint: disable=protected-access - loop = self._loop - disconnected_futures = self._disconnected_futures - disconnected_future = loop.create_future() - disconnected_futures.add(disconnected_future) - ble_device = self._ble_device - disconnect_message = ( - f"{self._source_name }: {ble_device.name} - {ble_device.address}: " - "Disconnected during operation" - ) - try: - async with interrupt(disconnected_future, BleakError, disconnect_message): - return await func(self, *args, **kwargs) - finally: - disconnected_futures.discard(disconnected_future) - - return cast(_WrapFuncType, _async_wrap_bluetooth_connected_operation) - - def api_error_as_bleak_error(func: _WrapFuncType) -> _WrapFuncType: """Define a wrapper throw esphome api errors as BleakErrors.""" async def _async_wrap_bluetooth_operation( self: ESPHomeClient, *args: Any, **kwargs: Any ) -> Any: + # pylint: disable=protected-access try: return await 
func(self, *args, **kwargs) except TimeoutAPIError as err: raise asyncio.TimeoutError(str(err)) from err + except BluetoothConnectionDroppedError as ex: + _LOGGER.debug( + "%s: BLE device disconnected during %s operation", + self._description, + func.__name__, + ) + self._async_ble_device_disconnected() + raise BleakError(str(ex)) from ex except BluetoothGATTAPIError as ex: # If the device disconnects in the middle of an operation # be sure to mark it as disconnected so any library using @@ -113,12 +97,9 @@ def api_error_as_bleak_error(func: _WrapFuncType) -> _WrapFuncType: # before the callback is delivered. if ex.error.error == -1: - # pylint: disable=protected-access _LOGGER.debug( - "%s: %s - %s: BLE device disconnected during %s operation", - self._source_name, - self._ble_device.name, - self._ble_device.address, + "%s: BLE device disconnected during %s operation", + self._description, func.__name__, ) self._async_ble_device_disconnected() @@ -140,7 +121,7 @@ class ESPHomeClientData: api_version: APIVersion title: str scanner: ESPHomeScanner | None - disconnect_callbacks: list[Callable[[], None]] = field(default_factory=list) + disconnect_callbacks: set[Callable[[], None]] = field(default_factory=set) class ESPHomeClient(BaseBleakClient): @@ -159,10 +140,11 @@ class ESPHomeClient(BaseBleakClient): assert isinstance(address_or_ble_device, BLEDevice) super().__init__(address_or_ble_device, *args, **kwargs) self._loop = asyncio.get_running_loop() - self._ble_device = address_or_ble_device - self._address_as_int = mac_to_int(self._ble_device.address) - assert self._ble_device.details is not None - self._source = self._ble_device.details["source"] + ble_device = address_or_ble_device + self._ble_device = ble_device + self._address_as_int = mac_to_int(ble_device.address) + assert ble_device.details is not None + self._source = ble_device.details["source"] self._cache = client_data.cache self._bluetooth_device = client_data.bluetooth_device self._client = 
client_data.client @@ -172,39 +154,22 @@ class ESPHomeClient(BaseBleakClient): self._notify_cancels: dict[ int, tuple[Callable[[], Coroutine[Any, Any, None]], Callable[[], None]] ] = {} - self._disconnected_futures: set[asyncio.Future[None]] = set() self._device_info = client_data.device_info self._feature_flags = device_info.bluetooth_proxy_feature_flags_compat( client_data.api_version ) - self._address_type = address_or_ble_device.details["address_type"] + self._address_type = ble_device.details["address_type"] self._source_name = f"{client_data.title} [{self._source}]" + self._description = ( + f"{self._source_name}: {ble_device.name} - {ble_device.address}" + ) scanner = client_data.scanner assert scanner is not None self._scanner = scanner def __str__(self) -> str: """Return the string representation of the client.""" - return f"ESPHomeClient ({self.address})" - - def _unsubscribe_connection_state(self) -> None: - """Unsubscribe from connection state updates.""" - if not self._cancel_connection_state: - return - try: - self._cancel_connection_state() - except (AssertionError, ValueError) as ex: - _LOGGER.debug( - ( - "%s: %s - %s: Failed to unsubscribe from connection state (likely" - " connection dropped): %s" - ), - self._source_name, - self._ble_device.name, - self._ble_device.address, - ex, - ) - self._cancel_connection_state = None + return f"ESPHomeClient ({self._description})" def _async_disconnected_cleanup(self) -> None: """Clean up on disconnect.""" @@ -213,34 +178,25 @@ class ESPHomeClient(BaseBleakClient): for _, notify_abort in self._notify_cancels.values(): notify_abort() self._notify_cancels.clear() - for future in self._disconnected_futures: - if not future.done(): - future.set_result(None) - self._disconnected_futures.clear() - self._unsubscribe_connection_state() + self._disconnect_callbacks.discard(self._async_esp_disconnected) + if self._cancel_connection_state: + self._cancel_connection_state() + self._cancel_connection_state = None def 
_async_ble_device_disconnected(self) -> None: """Handle the BLE device disconnecting from the ESP.""" was_connected = self._is_connected self._async_disconnected_cleanup() if was_connected: - _LOGGER.debug( - "%s: %s - %s: BLE device disconnected", - self._source_name, - self._ble_device.name, - self._ble_device.address, - ) + _LOGGER.debug("%s: BLE device disconnected", self._description) self._async_call_bleak_disconnected_callback() def _async_esp_disconnected(self) -> None: """Handle the esp32 client disconnecting from us.""" - _LOGGER.debug( - "%s: %s - %s: ESP device disconnected", - self._source_name, - self._ble_device.name, - self._ble_device.address, - ) - self._disconnect_callbacks.remove(self._async_esp_disconnected) + _LOGGER.debug("%s: ESP device disconnected", self._description) + # Calling _async_ble_device_disconnected calls + # _async_disconnected_cleanup which will also remove + # the disconnect callbacks self._async_ble_device_disconnected() def _async_call_bleak_disconnected_callback(self) -> None: @@ -258,10 +214,8 @@ class ESPHomeClient(BaseBleakClient): ) -> None: """Handle a connect or disconnect.""" _LOGGER.debug( - "%s: %s - %s: Connection state changed to connected=%s mtu=%s error=%s", - self._source_name, - self._ble_device.name, - self._ble_device.address, + "%s: Connection state changed to connected=%s mtu=%s error=%s", + self._description, connected, mtu, error, @@ -300,12 +254,10 @@ class ESPHomeClient(BaseBleakClient): return _LOGGER.debug( - "%s: %s - %s: connected, registering for disconnected callbacks", - self._source_name, - self._ble_device.name, - self._ble_device.address, + "%s: connected, registering for disconnected callbacks", + self._description, ) - self._disconnect_callbacks.append(self._async_esp_disconnected) + self._disconnect_callbacks.add(self._async_esp_disconnected) connected_future.set_result(connected) @api_error_as_bleak_error @@ -403,10 +355,8 @@ class ESPHomeClient(BaseBleakClient): if 
bluetooth_device.ble_connections_free: return _LOGGER.debug( - "%s: %s - %s: Out of connection slots, waiting for a free one", - self._source_name, - self._ble_device.name, - self._ble_device.address, + "%s: Out of connection slots, waiting for a free one", + self._description, ) async with asyncio.timeout(timeout): await bluetooth_device.wait_for_ble_connections_free() @@ -421,7 +371,6 @@ class ESPHomeClient(BaseBleakClient): """Get ATT MTU size for active connection.""" return self._mtu or DEFAULT_MTU - @verify_connected @api_error_as_bleak_error async def pair(self, *args: Any, **kwargs: Any) -> bool: """Attempt to pair.""" @@ -430,15 +379,15 @@ class ESPHomeClient(BaseBleakClient): "Pairing is not available in this version ESPHome; " f"Upgrade the ESPHome version on the {self._device_info.name} device." ) + self._raise_if_not_connected() response = await self._client.bluetooth_device_pair(self._address_as_int) if response.paired: return True _LOGGER.error( - "Pairing with %s failed due to error: %s", self.address, response.error + "%s: Pairing failed due to error: %s", self._description, response.error ) return False - @verify_connected @api_error_as_bleak_error async def unpair(self) -> bool: """Attempt to unpair.""" @@ -447,11 +396,12 @@ class ESPHomeClient(BaseBleakClient): "Unpairing is not available in this version ESPHome; " f"Upgrade the ESPHome version on the {self._device_info.name} device." 
) + self._raise_if_not_connected() response = await self._client.bluetooth_device_unpair(self._address_as_int) if response.success: return True _LOGGER.error( - "Unpairing with %s failed due to error: %s", self.address, response.error + "%s: Unpairing failed due to error: %s", self._description, response.error ) return False @@ -469,7 +419,6 @@ class ESPHomeClient(BaseBleakClient): dangerous_use_bleak_cache=dangerous_use_bleak_cache, **kwargs ) - @verify_connected async def _get_services( self, dangerous_use_bleak_cache: bool = False, **kwargs: Any ) -> BleakGATTServiceCollection: @@ -477,6 +426,7 @@ class ESPHomeClient(BaseBleakClient): Must only be called from get_services or connected """ + self._raise_if_not_connected() address_as_int = self._address_as_int cache = self._cache # If the connection version >= 3, we must use the cache @@ -486,30 +436,14 @@ class ESPHomeClient(BaseBleakClient): self._feature_flags & BluetoothProxyFeature.REMOTE_CACHING or dangerous_use_bleak_cache ) and (cached_services := cache.get_gatt_services_cache(address_as_int)): - _LOGGER.debug( - "%s: %s - %s: Cached services hit", - self._source_name, - self._ble_device.name, - self._ble_device.address, - ) + _LOGGER.debug("%s: Cached services hit", self._description) self.services = cached_services return self.services - _LOGGER.debug( - "%s: %s - %s: Cached services miss", - self._source_name, - self._ble_device.name, - self._ble_device.address, - ) + _LOGGER.debug("%s: Cached services miss", self._description) esphome_services = await self._client.bluetooth_gatt_get_services( address_as_int ) - _LOGGER.debug( - "%s: %s - %s: Got services: %s", - self._source_name, - self._ble_device.name, - self._ble_device.address, - esphome_services, - ) + _LOGGER.debug("%s: Got services: %s", self._description, esphome_services) max_write_without_response = self.mtu_size - GATT_HEADER_SIZE services = BleakGATTServiceCollection() # type: ignore[no-untyped-call] for service in 
esphome_services.services: @@ -538,12 +472,7 @@ class ESPHomeClient(BaseBleakClient): raise BleakError("Failed to get services from remote esp") self.services = services - _LOGGER.debug( - "%s: %s - %s: Cached services saved", - self._source_name, - self._ble_device.name, - self._ble_device.address, - ) + _LOGGER.debug("%s: Cached services saved", self._description) cache.set_gatt_services_cache(address_as_int, services) return services @@ -552,16 +481,17 @@ class ESPHomeClient(BaseBleakClient): ) -> BleakGATTCharacteristic: """Resolve a characteristic specifier to a BleakGATTCharacteristic object.""" if (services := self.services) is None: - raise BleakError("Services have not been resolved") + raise BleakError(f"{self._description}: Services have not been resolved") if not isinstance(char_specifier, BleakGATTCharacteristic): characteristic = services.get_characteristic(char_specifier) else: characteristic = char_specifier if not characteristic: - raise BleakError(f"Characteristic {char_specifier} was not found!") + raise BleakError( + f"{self._description}: Characteristic {char_specifier} was not found!" + ) return characteristic - @verify_connected @api_error_as_bleak_error async def clear_cache(self) -> bool: """Clear the GATT cache.""" @@ -575,17 +505,17 @@ class ESPHomeClient(BaseBleakClient): self._device_info.name, ) return True + self._raise_if_not_connected() response = await self._client.bluetooth_device_clear_cache(self._address_as_int) if response.success: return True _LOGGER.error( - "Clear cache failed with %s failed due to error: %s", - self.address, + "%s: Clear cache failed due to error: %s", + self._description, response.error, ) return False - @verify_connected @api_error_as_bleak_error async def read_gatt_char( self, @@ -604,12 +534,12 @@ class ESPHomeClient(BaseBleakClient): Returns: (bytearray) The read data. 
""" + self._raise_if_not_connected() characteristic = self._resolve_characteristic(char_specifier) return await self._client.bluetooth_gatt_read( self._address_as_int, characteristic.handle, GATT_READ_TIMEOUT ) - @verify_connected @api_error_as_bleak_error async def read_gatt_descriptor(self, handle: int, **kwargs: Any) -> bytearray: """Perform read operation on the specified GATT descriptor. @@ -621,11 +551,11 @@ class ESPHomeClient(BaseBleakClient): Returns: (bytearray) The read data. """ + self._raise_if_not_connected() return await self._client.bluetooth_gatt_read_descriptor( self._address_as_int, handle, GATT_READ_TIMEOUT ) - @verify_connected @api_error_as_bleak_error async def write_gatt_char( self, @@ -644,12 +574,12 @@ class ESPHomeClient(BaseBleakClient): response (bool): If write-with-response operation should be done. Defaults to `False`. """ + self._raise_if_not_connected() characteristic = self._resolve_characteristic(characteristic) await self._client.bluetooth_gatt_write( self._address_as_int, characteristic.handle, bytes(data), response ) - @verify_connected @api_error_as_bleak_error async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: """Perform a write operation on the specified GATT descriptor. @@ -658,11 +588,11 @@ class ESPHomeClient(BaseBleakClient): handle (int): The handle of the descriptor to read from. data (bytes or bytearray): The data to send. """ + self._raise_if_not_connected() await self._client.bluetooth_gatt_write_descriptor( self._address_as_int, handle, bytes(data) ) - @verify_connected @api_error_as_bleak_error async def start_notify( self, @@ -689,10 +619,11 @@ class ESPHomeClient(BaseBleakClient): callback (function): The function to be called on notification. kwargs: Unused. 
""" + self._raise_if_not_connected() ble_handle = characteristic.handle if ble_handle in self._notify_cancels: raise BleakError( - "Notifications are already enabled on " + f"{self._description}: Notifications are already enabled on " f"service:{characteristic.service_uuid} " f"characteristic:{characteristic.uuid} " f"handle:{ble_handle}" @@ -702,8 +633,8 @@ class ESPHomeClient(BaseBleakClient): and "indicate" not in characteristic.properties ): raise BleakError( - f"Characteristic {characteristic.uuid} does not have notify or indicate" - " property set." + f"{self._description}: Characteristic {characteristic.uuid} " + "does not have notify or indicate property set." ) self._notify_cancels[ @@ -725,18 +656,13 @@ class ESPHomeClient(BaseBleakClient): cccd_descriptor = characteristic.get_descriptor(CCCD_UUID) if not cccd_descriptor: raise BleakError( - f"Characteristic {characteristic.uuid} does not have a " - "characteristic client config descriptor." + f"{self._description}: Characteristic {characteristic.uuid} " + "does not have a characteristic client config descriptor." ) _LOGGER.debug( - ( - "%s: %s - %s: Writing to CCD descriptor %s for notifications with" - " properties=%s" - ), - self._source_name, - self._ble_device.name, - self._ble_device.address, + "%s: Writing to CCD descriptor %s for notifications with properties=%s", + self._description, cccd_descriptor.handle, characteristic.properties, ) @@ -748,7 +674,6 @@ class ESPHomeClient(BaseBleakClient): wait_for_response=False, ) - @verify_connected @api_error_as_bleak_error async def stop_notify( self, @@ -762,6 +687,7 @@ class ESPHomeClient(BaseBleakClient): specified by either integer handle, UUID or directly by the BleakGATTCharacteristic object representing it. 
""" + self._raise_if_not_connected() characteristic = self._resolve_characteristic(char_specifier) # Do not raise KeyError if notifications are not enabled on this characteristic # to be consistent with the behavior of the BlueZ backend @@ -769,17 +695,20 @@ class ESPHomeClient(BaseBleakClient): notify_stop, _ = notify_cancel await notify_stop() + def _raise_if_not_connected(self) -> None: + """Raise a BleakError if not connected.""" + if not self._is_connected: + raise BleakError(f"{self._description} is not connected") + def __del__(self) -> None: """Destructor to make sure the connection state is unsubscribed.""" if self._cancel_connection_state: _LOGGER.warning( ( - "%s: %s - %s: ESPHomeClient bleak client was not properly" + "%s: ESPHomeClient bleak client was not properly" " disconnected before destruction" ), - self._source_name, - self._ble_device.name, - self._ble_device.address, + self._description, ) if not self._loop.is_closed(): self._loop.call_soon_threadsafe(self._async_disconnected_cleanup) diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index b34714ff89c..08ed2f1109d 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -164,11 +164,15 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti ) self._attr_min_temp = static_info.visual_min_temperature self._attr_max_temp = static_info.visual_max_temperature + self._attr_min_humidity = round(static_info.visual_min_humidity) + self._attr_max_humidity = round(static_info.visual_max_humidity) features = ClimateEntityFeature(0) if self._static_info.supports_two_point_target_temperature: features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE else: features |= ClimateEntityFeature.TARGET_TEMPERATURE + if self._static_info.supports_target_humidity: + features |= ClimateEntityFeature.TARGET_HUMIDITY if self.preset_modes: features |= ClimateEntityFeature.PRESET_MODE if 
self.fan_modes: @@ -234,6 +238,14 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti """Return the current temperature.""" return self._state.current_temperature + @property + @esphome_state_property + def current_humidity(self) -> int | None: + """Return the current humidity.""" + if not self._static_info.supports_current_humidity: + return None + return round(self._state.current_humidity) + @property @esphome_state_property def target_temperature(self) -> float | None: @@ -252,6 +264,12 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti """Return the highbound target temperature we try to reach.""" return self._state.target_temperature_high + @property + @esphome_state_property + def target_humidity(self) -> int: + """Return the humidity we try to reach.""" + return round(self._state.target_humidity) + async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature (and operation mode if set).""" data: dict[str, Any] = {"key": self._key} @@ -267,6 +285,10 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti data["target_temperature_high"] = kwargs[ATTR_TARGET_TEMP_HIGH] await self._client.climate_command(**data) + async def async_set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + await self._client.climate_command(key=self._key, target_humidity=humidity) + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target operation mode.""" await self._client.climate_command( diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index e53200c2e90..d69a30a8c1a 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -107,7 +107,7 @@ class RuntimeEntryData: bluetooth_device: ESPHomeBluetoothDevice | None = None api_version: APIVersion = field(default_factory=APIVersion) 
cleanup_callbacks: list[Callable[[], None]] = field(default_factory=list) - disconnect_callbacks: list[Callable[[], None]] = field(default_factory=list) + disconnect_callbacks: set[Callable[[], None]] = field(default_factory=set) state_subscriptions: dict[ tuple[type[EntityState], int], Callable[[], None] ] = field(default_factory=dict) @@ -321,7 +321,6 @@ class RuntimeEntryData: current_state_by_type = self.state[state_type] current_state = current_state_by_type.get(key, _SENTINEL) subscription_key = (state_type, key) - debug_enabled = _LOGGER.isEnabledFor(logging.DEBUG) if ( current_state == state and subscription_key not in stale_state @@ -333,21 +332,7 @@ class RuntimeEntryData: and (cast(SensorInfo, entity_info)).force_update ) ): - if debug_enabled: - _LOGGER.debug( - "%s: ignoring duplicate update with key %s: %s", - self.name, - key, - state, - ) return - if debug_enabled: - _LOGGER.debug( - "%s: dispatching update with key %s: %s", - self.name, - key, - state, - ) stale_state.discard(subscription_key) current_state_by_type[key] = state if subscription := self.state_subscriptions.get(subscription_key): @@ -427,3 +412,19 @@ class RuntimeEntryData: if self.original_options == entry.options: return hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) + + @callback + def async_on_disconnect(self) -> None: + """Call when the entry has been disconnected. + + Safe to call multiple times. + """ + self.available = False + # Make a copy since calling the disconnect callbacks + # may also try to discard/remove themselves. + for disconnect_cb in self.disconnect_callbacks.copy(): + disconnect_cb() + # Make sure to clear the set to give up the reference + # to it and make sure all the callbacks can be GC'd. 
+ self.disconnect_callbacks.clear() + self.disconnect_callbacks = set() diff --git a/homeassistant/components/esphome/enum_mapper.py b/homeassistant/components/esphome/enum_mapper.py index 566f0bc503b..fd09f9a05b6 100644 --- a/homeassistant/components/esphome/enum_mapper.py +++ b/homeassistant/components/esphome/enum_mapper.py @@ -14,9 +14,7 @@ class EsphomeEnumMapper(Generic[_EnumT, _ValT]): def __init__(self, mapping: dict[_EnumT, _ValT]) -> None: """Construct a EsphomeEnumMapper.""" # Add none mapping - augmented_mapping: dict[ - _EnumT | None, _ValT | None - ] = mapping # type: ignore[assignment] + augmented_mapping: dict[_EnumT | None, _ValT | None] = mapping # type: ignore[assignment] augmented_mapping[None] = None self._mapping = augmented_mapping diff --git a/homeassistant/components/esphome/fan.py b/homeassistant/components/esphome/fan.py index a6ca52d6c1a..9942498e12d 100644 --- a/homeassistant/components/esphome/fan.py +++ b/homeassistant/components/esphome/fan.py @@ -117,7 +117,8 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity): """Return the current speed percentage.""" if not self._supports_speed_levels: return ordered_list_item_to_percentage( - ORDERED_NAMED_FAN_SPEEDS, self._state.speed # type: ignore[misc] + ORDERED_NAMED_FAN_SPEEDS, + self._state.speed, # type: ignore[misc] ) return ranged_value_to_percentage( diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index d2eca7d39f9..79e8a0a06fa 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -1,6 +1,8 @@ """Manager for esphome devices.""" from __future__ import annotations +import asyncio +from collections.abc import Coroutine import logging from typing import TYPE_CHECKING, Any, NamedTuple @@ -9,6 +11,7 @@ from aioesphomeapi import ( APIConnectionError, APIVersion, DeviceInfo as EsphomeDeviceInfo, + EntityInfo, HomeassistantServiceCall, InvalidAuthAPIError, 
InvalidEncryptionKeyAPIError, @@ -24,8 +27,20 @@ import voluptuous as vol from homeassistant.components import tag, zeroconf from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DEVICE_ID, CONF_MODE, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, HomeAssistant, ServiceCall, State, callback +from homeassistant.const import ( + ATTR_DEVICE_ID, + CONF_MODE, + EVENT_HOMEASSISTANT_STOP, + EVENT_LOGGING_CHANGED, +) +from homeassistant.core import ( + CALLBACK_TYPE, + Event, + HomeAssistant, + ServiceCall, + State, + callback, +) from homeassistant.exceptions import TemplateError from homeassistant.helpers import template import homeassistant.helpers.config_validation as cv @@ -294,7 +309,7 @@ class ESPHomeManager: event.data["entity_id"], attribute, new_state ) - self.entry_data.disconnect_callbacks.append( + self.entry_data.disconnect_callbacks.add( async_track_state_change_event( hass, [entity_id], send_home_assistant_state_event ) @@ -371,13 +386,20 @@ class ESPHomeManager: stored_device_name = entry.data.get(CONF_DEVICE_NAME) unique_id_is_mac_address = unique_id and ":" in unique_id try: - device_info = await cli.device_info() + results = await asyncio.gather( + cli.device_info(), + cli.list_entities_services(), + ) except APIConnectionError as err: _LOGGER.warning("Error getting device info for %s: %s", self.host, err) # Re-connection logic will trigger after this await cli.disconnect() return + device_info: EsphomeDeviceInfo = results[0] + entity_infos_services: tuple[list[EntityInfo], list[UserService]] = results[1] + entity_infos, services = entity_infos_services + device_mac = format_mac(device_info.mac_address) mac_address_matches = unique_id == device_mac # @@ -438,42 +460,55 @@ class ESPHomeManager: if device_info.name: reconnect_logic.name = device_info.name + self.device_id = _async_setup_device_registry(hass, entry, entry_data) + entry_data.async_update_device_state(hass) + await asyncio.gather( + 
entry_data.async_update_static_infos( + hass, entry, entity_infos, device_info.mac_address + ), + _setup_services(hass, entry_data, services), + ) + + setup_coros_with_disconnect_callbacks: list[ + Coroutine[Any, Any, CALLBACK_TYPE] + ] = [] if device_info.bluetooth_proxy_feature_flags_compat(cli.api_version): - entry_data.disconnect_callbacks.append( - await async_connect_scanner( + setup_coros_with_disconnect_callbacks.append( + async_connect_scanner( hass, entry, cli, entry_data, self.domain_data.bluetooth_cache ) ) - self.device_id = _async_setup_device_registry(hass, entry, entry_data) - entry_data.async_update_device_state(hass) + if device_info.voice_assistant_version: + setup_coros_with_disconnect_callbacks.append( + cli.subscribe_voice_assistant( + self._handle_pipeline_start, + self._handle_pipeline_stop, + ) + ) try: - entity_infos, services = await cli.list_entities_services() - await entry_data.async_update_static_infos( - hass, entry, entity_infos, device_info.mac_address + setup_results = await asyncio.gather( + *setup_coros_with_disconnect_callbacks, + cli.subscribe_states(entry_data.async_update_state), + cli.subscribe_service_calls(self.async_on_service_call), + cli.subscribe_home_assistant_states(self.async_on_state_subscription), ) - await _setup_services(hass, entry_data, services) - await cli.subscribe_states(entry_data.async_update_state) - await cli.subscribe_service_calls(self.async_on_service_call) - await cli.subscribe_home_assistant_states(self.async_on_state_subscription) - - if device_info.voice_assistant_version: - entry_data.disconnect_callbacks.append( - await cli.subscribe_voice_assistant( - self._handle_pipeline_start, - self._handle_pipeline_stop, - ) - ) - - hass.async_create_task(entry_data.async_save_to_store()) except APIConnectionError as err: _LOGGER.warning("Error getting initial data for %s: %s", self.host, err) # Re-connection logic will trigger after this await cli.disconnect() - else: - 
_async_check_firmware_version(hass, device_info, entry_data.api_version) - _async_check_using_api_password(hass, device_info, bool(self.password)) + return + + for result_idx in range(len(setup_coros_with_disconnect_callbacks)): + cancel_callback = setup_results[result_idx] + if TYPE_CHECKING: + assert cancel_callback is not None + entry_data.disconnect_callbacks.add(cancel_callback) + + hass.async_create_task(entry_data.async_save_to_store()) + _async_check_firmware_version(hass, device_info, entry_data.api_version) + _async_check_using_api_password(hass, device_info, bool(self.password)) async def on_disconnect(self, expected_disconnect: bool) -> None: """Run disconnect callbacks on API disconnect.""" @@ -487,10 +522,7 @@ class ESPHomeManager: host, expected_disconnect, ) - for disconnect_cb in entry_data.disconnect_callbacks: - disconnect_cb() - entry_data.disconnect_callbacks = [] - entry_data.available = False + entry_data.async_on_disconnect() entry_data.expected_disconnect = expected_disconnect # Mark state as stale so that we will always dispatch # the next state update of that type when the device reconnects @@ -518,6 +550,11 @@ class ESPHomeManager: ): self.entry.async_start_reauth(self.hass) + @callback + def _async_handle_logging_changed(self, _event: Event) -> None: + """Handle when the logging level changes.""" + self.cli.set_debug(_LOGGER.isEnabledFor(logging.DEBUG)) + async def async_start(self) -> None: """Start the esphome connection manager.""" hass = self.hass @@ -534,6 +571,11 @@ class ESPHomeManager: entry_data.cleanup_callbacks.append( hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.on_stop) ) + entry_data.cleanup_callbacks.append( + hass.bus.async_listen( + EVENT_LOGGING_CHANGED, self._async_handle_logging_changed + ) + ) reconnect_logic = ReconnectLogic( client=self.cli, @@ -755,10 +797,7 @@ async def cleanup_instance(hass: HomeAssistant, entry: ConfigEntry) -> RuntimeEn """Cleanup the esphome client if it exists.""" domain_data = 
DomainData.get(hass) data = domain_data.pop_entry_data(entry) - data.available = False - for disconnect_cb in data.disconnect_callbacks: - disconnect_cb() - data.disconnect_callbacks = [] + data.async_on_disconnect() for cleanup_callback in data.cleanup_callbacks: cleanup_callback() await data.async_cleanup() diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 3b5a2050cb8..db9cd9ba72c 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -15,9 +15,8 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol"], "requirements": [ - "async-interrupt==1.1.1", - "aioesphomeapi==18.4.0", - "bluetooth-data-tools==1.14.0", + "aioesphomeapi==19.2.1", + "bluetooth-data-tools==1.16.0", "esphome-dashboard-api==1.2.3" ], "zeroconf": ["_esphomelib._tcp.local."] diff --git a/homeassistant/components/esphome/voice_assistant.py b/homeassistant/components/esphome/voice_assistant.py index bb62d495076..de6b521d980 100644 --- a/homeassistant/components/esphome/voice_assistant.py +++ b/homeassistant/components/esphome/voice_assistant.py @@ -186,16 +186,22 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol): data_to_send = {"text": event.data["tts_input"]} elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: assert event.data is not None - path = event.data["tts_output"]["url"] - url = async_process_play_media_url(self.hass, path) - data_to_send = {"url": url} + tts_output = event.data["tts_output"] + if tts_output: + path = tts_output["url"] + url = async_process_play_media_url(self.hass, path) + data_to_send = {"url": url} - if self.device_info.voice_assistant_version >= 2: - media_id = event.data["tts_output"]["media_id"] - self._tts_task = self.hass.async_create_background_task( - self._send_tts(media_id), "esphome_voice_assistant_tts" - ) + if self.device_info.voice_assistant_version >= 2: + media_id = 
tts_output["media_id"] + self._tts_task = self.hass.async_create_background_task( + self._send_tts(media_id), "esphome_voice_assistant_tts" + ) + else: + self._tts_done.set() else: + # Empty TTS response + data_to_send = {} self._tts_done.set() elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: assert event.data is not None @@ -301,10 +307,6 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol): if self.transport is None: return - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {} - ) - extension, data = await tts.async_get_media_source_audio( self.hass, media_id, @@ -331,11 +333,17 @@ class VoiceAssistantUDPServer(asyncio.DatagramProtocol): audio_bytes = wav_file.readframes(wav_file.getnframes()) - _LOGGER.debug("Sending %d bytes of audio", len(audio_bytes)) + audio_bytes_size = len(audio_bytes) + + _LOGGER.debug("Sending %d bytes of audio", audio_bytes_size) + + self.handle_event( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {} + ) bytes_per_sample = stt.AudioBitRates.BITRATE_16 // 8 sample_offset = 0 - samples_left = len(audio_bytes) // bytes_per_sample + samples_left = audio_bytes_size // bytes_per_sample while samples_left > 0: bytes_offset = sample_offset * bytes_per_sample diff --git a/homeassistant/components/evil_genius_labs/strings.json b/homeassistant/components/evil_genius_labs/strings.json index 790e9a69c7f..123d164444d 100644 --- a/homeassistant/components/evil_genius_labs/strings.json +++ b/homeassistant/components/evil_genius_labs/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Evil Genius Labs device." 
} } }, diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index c26310bf61c..9c33b0fbf31 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -124,10 +124,13 @@ def convert_dict(dictionary: dict[str, Any]) -> dict[str, Any]: def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) - return (string[0]).lower() + re.sub( - r"[A-Z]", - lambda matched: f"_{matched.group(0).lower()}", # type:ignore[str-bytes-safe] - string[1:], + return ( + (string[0]).lower() + + re.sub( + r"[A-Z]", + lambda matched: f"_{matched.group(0).lower()}", # type:ignore[str-bytes-safe] + string[1:], + ) ) return { @@ -187,14 +190,14 @@ def _handle_exception(err) -> None: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" - async def load_auth_tokens(store) -> tuple[dict, dict | None]: + async def load_auth_tokens(store) -> tuple[dict[str, str | dt], dict[str, str]]: app_storage = await store.async_load() tokens = dict(app_storage or {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be corrupt await store.async_save({}) - return ({}, None) + return ({}, {}) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None and ( @@ -202,7 +205,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ): tokens[ACCESS_TOKEN_EXPIRES] = _dt_aware_to_naive(expires) - user_data = tokens.pop(USER_DATA, None) + user_data = tokens.pop(USER_DATA, {}) return (tokens, user_data) store = Store[dict[str, Any]](hass, STORAGE_VER, STORAGE_KEY) @@ -211,7 +214,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], - 
**tokens, + **tokens, # type: ignore[arg-type] session=async_get_clientsession(hass), ) @@ -250,7 +253,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, - user_data=user_data, + session_id=user_data.get("sessionId") if user_data else None, # STORAGE_VER 1 session=async_get_clientsession(hass), ) @@ -422,7 +425,7 @@ class EvoBroker: self.tcs_utc_offset = timedelta( minutes=client.locations[loc_idx].timeZone[UTC_OFFSET] ) - self.temps: dict[str, Any] | None = {} + self.temps: dict[str, float | None] = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" @@ -438,14 +441,12 @@ class EvoBroker: ACCESS_TOKEN_EXPIRES: access_token_expires.isoformat(), } - if self.client_v1 and self.client_v1.user_data: - user_id = self.client_v1.user_data["userInfo"]["userID"] # type: ignore[index] + if self.client_v1: app_storage[USER_DATA] = { # type: ignore[assignment] - "userInfo": {"userID": user_id}, - "sessionId": self.client_v1.user_data["sessionId"], - } + "sessionId": self.client_v1.broker.session_id, + } # this is the schema for STORAGE_VER == 1 else: - app_storage[USER_DATA] = None + app_storage[USER_DATA] = {} # type: ignore[assignment] await self._store.async_save(app_storage) @@ -465,16 +466,13 @@ class EvoBroker: async def _update_v1_api_temps(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" - assert self.client_v1 + assert self.client_v1 # mypy check - def get_session_id(client_v1) -> str | None: - user_data = client_v1.user_data if client_v1 else None - return user_data.get("sessionId") if user_data else None - - session_id = get_session_id(self.client_v1) + session_id = self.client_v1.broker.session_id # maybe receive a new session_id? 
+ self.temps = {} # these are now stale, will fall back to v2 temps try: - temps = list(await self.client_v1.temperatures(force_refresh=True)) + temps = await self.client_v1.get_temperatures() except evohomeasync.InvalidSchema as exc: _LOGGER.warning( @@ -486,7 +484,7 @@ class EvoBroker: ), exc, ) - self.temps = self.client_v1 = None + self.client_v1 = None except evohomeasync.EvohomeError as exc: _LOGGER.warning( @@ -498,7 +496,6 @@ class EvoBroker: ), exc, ) - self.temps = None # these are now stale, will fall back to v2 temps else: if ( @@ -510,19 +507,20 @@ class EvoBroker: "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled until next restart" ) - self.temps = self.client_v1 = None + self.client_v1 = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} finally: - if session_id != get_session_id(self.client_v1): + if self.client_v1 and session_id != self.client_v1.broker.session_id: await self.save_auth_tokens() _LOGGER.debug("Temperatures = %s", self.temps) async def _update_v2_api_state(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" - access_token = self.client.access_token + + access_token = self.client.access_token # maybe receive a new token? loc_idx = self.params[CONF_LOCATION_IDX] try: @@ -533,9 +531,9 @@ class EvoBroker: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status) - - if access_token != self.client.access_token: - await self.save_auth_tokens() + finally: + if access_token != self.client.access_token: + await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. 
@@ -559,6 +557,8 @@ class EvoDevice(Entity): _attr_should_poll = False + _evo_id: str + def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device @@ -620,18 +620,10 @@ class EvoChild(EvoDevice): @property def current_temperature(self) -> float | None: """Return the current temperature of a Zone.""" - if self._evo_device.TYPE == "domesticHotWater": - dev_id = self._evo_device.dhwId - else: - dev_id = self._evo_device.zoneId - if self._evo_broker.temps and self._evo_broker.temps[dev_id] is not None: - return self._evo_broker.temps[dev_id] - - if self._evo_device.temperatureStatus["isAvailable"]: - return self._evo_device.temperatureStatus["temperature"] - - return None + if self._evo_broker.temps.get(self._evo_id) is not None: + return self._evo_broker.temps[self._evo_id] + return self._evo_device.temperature @property def setpoints(self) -> dict[str, Any]: @@ -676,7 +668,7 @@ class EvoChild(EvoDevice): switchpoint_time_of_day = dt_util.parse_datetime( f"{sp_date}T{switchpoint['TimeOfDay']}" ) - assert switchpoint_time_of_day + assert switchpoint_time_of_day # mypy check dt_aware = _dt_evo_to_aware( switchpoint_time_of_day, self._evo_broker.tcs_utc_offset ) diff --git a/homeassistant/components/evohome/climate.py b/homeassistant/components/evohome/climate.py index fb608262a7d..dea5676d332 100644 --- a/homeassistant/components/evohome/climate.py +++ b/homeassistant/components/evohome/climate.py @@ -150,6 +150,7 @@ class EvoZone(EvoChild, EvoClimateEntity): self._attr_unique_id = f"{evo_device.zoneId}z" else: self._attr_unique_id = evo_device.zoneId + self._evo_id = evo_device.zoneId self._attr_name = evo_device.name @@ -189,24 +190,27 @@ class EvoZone(EvoChild, EvoClimateEntity): ) @property - def hvac_mode(self) -> HVACMode: + def hvac_mode(self) -> HVACMode | None: """Return the current operating mode of a Zone.""" - if self._evo_tcs.systemModeStatus["mode"] in (EVO_AWAY, EVO_HEATOFF): + if 
self._evo_tcs.system_mode in (EVO_AWAY, EVO_HEATOFF): return HVACMode.AUTO - is_off = self.target_temperature <= self.min_temp - return HVACMode.OFF if is_off else HVACMode.HEAT + if self.target_temperature is None: + return None + if self.target_temperature <= self.min_temp: + return HVACMode.OFF + return HVACMode.HEAT @property - def target_temperature(self) -> float: + def target_temperature(self) -> float | None: """Return the target temperature of a Zone.""" - return self._evo_device.setpointStatus["targetHeatTemperature"] + return self._evo_device.target_heat_temperature @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - if self._evo_tcs.systemModeStatus["mode"] in (EVO_AWAY, EVO_HEATOFF): - return TCS_PRESET_TO_HA.get(self._evo_tcs.systemModeStatus["mode"]) - return EVO_PRESET_TO_HA.get(self._evo_device.setpointStatus["setpointMode"]) + if self._evo_tcs.system_mode in (EVO_AWAY, EVO_HEATOFF): + return TCS_PRESET_TO_HA.get(self._evo_tcs.system_mode) + return EVO_PRESET_TO_HA.get(self._evo_device.mode) @property def min_temp(self) -> float: @@ -214,7 +218,7 @@ class EvoZone(EvoChild, EvoClimateEntity): The default is 5, but is user-configurable within 5-35 (in Celsius). """ - return self._evo_device.setpointCapabilities["minHeatSetpoint"] + return self._evo_device.min_heat_setpoint @property def max_temp(self) -> float: @@ -222,17 +226,17 @@ class EvoZone(EvoChild, EvoClimateEntity): The default is 35, but is user-configurable within 5-35 (in Celsius). 
""" - return self._evo_device.setpointCapabilities["maxHeatSetpoint"] + return self._evo_device.max_heat_setpoint async def async_set_temperature(self, **kwargs: Any) -> None: """Set a new target temperature.""" temperature = kwargs["temperature"] if (until := kwargs.get("until")) is None: - if self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: + if self._evo_device.mode == EVO_FOLLOW: await self._update_schedule() until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", "")) - elif self._evo_device.setpointStatus["setpointMode"] == EVO_TEMPOVER: + elif self._evo_device.mode == EVO_TEMPOVER: until = dt_util.parse_datetime(self._evo_device.setpointStatus["until"]) until = dt_util.as_utc(until) if until else None @@ -272,7 +276,7 @@ class EvoZone(EvoChild, EvoClimateEntity): await self._evo_broker.call_client_api(self._evo_device.reset_mode()) return - temperature = self._evo_device.setpointStatus["targetHeatTemperature"] + temperature = self._evo_device.target_heat_temperature if evo_preset_mode == EVO_TEMPOVER: await self._update_schedule() @@ -311,6 +315,7 @@ class EvoController(EvoClimateEntity): super().__init__(evo_broker, evo_device) self._attr_unique_id = evo_device.systemId + self._evo_id = evo_device.systemId self._attr_name = evo_device.location.name modes = [m["systemMode"] for m in evo_broker.config["allowedSystemModes"]] @@ -352,7 +357,7 @@ class EvoController(EvoClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return the current operating mode of a Controller.""" - tcs_mode = self._evo_tcs.systemModeStatus["mode"] + tcs_mode = self._evo_tcs.system_mode return HVACMode.OFF if tcs_mode == EVO_HEATOFF else HVACMode.HEAT @property @@ -362,16 +367,18 @@ class EvoController(EvoClimateEntity): Controllers do not have a current temp, but one is expected by HA. 
""" temps = [ - z.temperatureStatus["temperature"] + z.temperature for z in self._evo_tcs.zones.values() - if z.temperatureStatus["isAvailable"] + if z.temperature is not None ] return round(sum(temps) / len(temps), 1) if temps else None @property def preset_mode(self) -> str | None: """Return the current preset mode, e.g., home, away, temp.""" - return TCS_PRESET_TO_HA.get(self._evo_tcs.systemModeStatus["mode"]) + if not self._evo_tcs.system_mode: + return None + return TCS_PRESET_TO_HA.get(self._evo_tcs.system_mode) async def async_set_temperature(self, **kwargs: Any) -> None: """Raise exception as Controllers don't have a target temperature.""" diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index 58efb2c25b2..769c8e597cd 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/evohome", "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], - "requirements": ["evohome-async==0.4.6"] + "requirements": ["evohome-async==0.4.9"] } diff --git a/homeassistant/components/evohome/services.yaml b/homeassistant/components/evohome/services.yaml index a16395ad6c0..60dcf37ebb0 100644 --- a/homeassistant/components/evohome/services.yaml +++ b/homeassistant/components/evohome/services.yaml @@ -24,7 +24,9 @@ set_system_mode: object: reset_system: + refresh_system: + set_zone_override: fields: entity_id: diff --git a/homeassistant/components/evohome/strings.json b/homeassistant/components/evohome/strings.json index aa38ee170a5..9e88c9bb031 100644 --- a/homeassistant/components/evohome/strings.json +++ b/homeassistant/components/evohome/strings.json @@ -6,7 +6,7 @@ "fields": { "mode": { "name": "[%key:common::config_flow::data::mode%]", - "description": "Mode to set thermostat." + "description": "Mode to set the system to." 
}, "period": { "name": "Period", diff --git a/homeassistant/components/evohome/water_heater.py b/homeassistant/components/evohome/water_heater.py index 5d49e9b46ec..51617bdf1cf 100644 --- a/homeassistant/components/evohome/water_heater.py +++ b/homeassistant/components/evohome/water_heater.py @@ -68,6 +68,7 @@ class EvoDHW(EvoChild, WaterHeaterEntity): super().__init__(evo_broker, evo_device) self._attr_unique_id = evo_device.dhwId + self._evo_id = evo_device.dhwId self._attr_precision = ( PRECISION_TENTHS if evo_broker.client_v1 else PRECISION_WHOLE @@ -79,15 +80,15 @@ class EvoDHW(EvoChild, WaterHeaterEntity): @property def current_operation(self) -> str: """Return the current operating mode (Auto, On, or Off).""" - if self._evo_device.stateStatus["mode"] == EVO_FOLLOW: + if self._evo_device.mode == EVO_FOLLOW: return STATE_AUTO - return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] + return EVO_STATE_TO_HA[self._evo_device.state] @property def is_away_mode_on(self): """Return True if away mode is on.""" - is_off = EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] == STATE_OFF - is_permanent = self._evo_device.stateStatus["mode"] == EVO_PERMOVER + is_off = EVO_STATE_TO_HA[self._evo_device.state] == STATE_OFF + is_permanent = self._evo_device.mode == EVO_PERMOVER return is_off and is_permanent async def async_set_operation_mode(self, operation_mode: str) -> None: diff --git a/homeassistant/components/faa_delays/binary_sensor.py b/homeassistant/components/faa_delays/binary_sensor.py index 5cbb206f223..c72fedaf59a 100644 --- a/homeassistant/components/faa_delays/binary_sensor.py +++ b/homeassistant/components/faa_delays/binary_sensor.py @@ -1,44 +1,88 @@ """Platform for FAA Delays sensor component.""" from __future__ import annotations +from collections.abc import Callable, Mapping +from dataclasses import dataclass from typing import Any +from faadelays import Airport + from homeassistant.components.binary_sensor import ( BinarySensorEntity, 
BinarySensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import FAADataUpdateCoordinator from .const import DOMAIN -FAA_BINARY_SENSORS: tuple[BinarySensorEntityDescription, ...] = ( - BinarySensorEntityDescription( + +@dataclass(kw_only=True) +class FaaDelaysBinarySensorEntityDescription(BinarySensorEntityDescription): + """Mixin for required keys.""" + + is_on_fn: Callable[[Airport], bool | None] + extra_state_attributes_fn: Callable[[Airport], Mapping[str, Any]] + + +FAA_BINARY_SENSORS: tuple[FaaDelaysBinarySensorEntityDescription, ...] = ( + FaaDelaysBinarySensorEntityDescription( key="GROUND_DELAY", - name="Ground Delay", + translation_key="ground_delay", icon="mdi:airport", + is_on_fn=lambda airport: airport.ground_delay.status, + extra_state_attributes_fn=lambda airport: { + "average": airport.ground_delay.average, + "reason": airport.ground_delay.reason, + }, ), - BinarySensorEntityDescription( + FaaDelaysBinarySensorEntityDescription( key="GROUND_STOP", - name="Ground Stop", + translation_key="ground_stop", icon="mdi:airport", + is_on_fn=lambda airport: airport.ground_stop.status, + extra_state_attributes_fn=lambda airport: { + "endtime": airport.ground_stop.endtime, + "reason": airport.ground_stop.reason, + }, ), - BinarySensorEntityDescription( + FaaDelaysBinarySensorEntityDescription( key="DEPART_DELAY", - name="Departure Delay", + translation_key="depart_delay", icon="mdi:airplane-takeoff", + is_on_fn=lambda airport: airport.depart_delay.status, + extra_state_attributes_fn=lambda airport: { + "minimum": airport.depart_delay.minimum, + "maximum": airport.depart_delay.maximum, + "trend": airport.depart_delay.trend, + "reason": airport.depart_delay.reason, + 
}, ), - BinarySensorEntityDescription( + FaaDelaysBinarySensorEntityDescription( key="ARRIVE_DELAY", - name="Arrival Delay", + translation_key="arrive_delay", icon="mdi:airplane-landing", + is_on_fn=lambda airport: airport.arrive_delay.status, + extra_state_attributes_fn=lambda airport: { + "minimum": airport.arrive_delay.minimum, + "maximum": airport.arrive_delay.maximum, + "trend": airport.arrive_delay.trend, + "reason": airport.arrive_delay.reason, + }, ), - BinarySensorEntityDescription( + FaaDelaysBinarySensorEntityDescription( key="CLOSURE", - name="Closure", + translation_key="closure", icon="mdi:airplane:off", + is_on_fn=lambda airport: airport.closure.status, + extra_state_attributes_fn=lambda airport: { + "begin": airport.closure.start, + "end": airport.closure.end, + }, ), ) @@ -57,60 +101,38 @@ async def async_setup_entry( async_add_entities(entities) -class FAABinarySensor(CoordinatorEntity, BinarySensorEntity): +class FAABinarySensor(CoordinatorEntity[FAADataUpdateCoordinator], BinarySensorEntity): """Define a binary sensor for FAA Delays.""" + _attr_has_entity_name = True + + entity_description: FaaDelaysBinarySensorEntityDescription + def __init__( - self, coordinator, entry_id, description: BinarySensorEntityDescription + self, + coordinator: FAADataUpdateCoordinator, + entry_id: str, + description: FaaDelaysBinarySensorEntityDescription, ) -> None: """Initialize the sensor.""" super().__init__(coordinator) self.entity_description = description - - self.coordinator = coordinator - self._entry_id = entry_id - self._attrs: dict[str, Any] = {} _id = coordinator.data.code self._attr_name = f"{_id} {description.name}" self._attr_unique_id = f"{_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, _id)}, + name=_id, + manufacturer="Federal Aviation Administration", + entry_type=DeviceEntryType.SERVICE, + ) @property - def is_on(self): + def is_on(self) -> bool | None: """Return the status of the sensor.""" - sensor_type = 
self.entity_description.key - if sensor_type == "GROUND_DELAY": - return self.coordinator.data.ground_delay.status - if sensor_type == "GROUND_STOP": - return self.coordinator.data.ground_stop.status - if sensor_type == "DEPART_DELAY": - return self.coordinator.data.depart_delay.status - if sensor_type == "ARRIVE_DELAY": - return self.coordinator.data.arrive_delay.status - if sensor_type == "CLOSURE": - return self.coordinator.data.closure.status - return None + return self.entity_description.is_on_fn(self.coordinator.data) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> Mapping[str, Any]: """Return attributes for sensor.""" - sensor_type = self.entity_description.key - if sensor_type == "GROUND_DELAY": - self._attrs["average"] = self.coordinator.data.ground_delay.average - self._attrs["reason"] = self.coordinator.data.ground_delay.reason - elif sensor_type == "GROUND_STOP": - self._attrs["endtime"] = self.coordinator.data.ground_stop.endtime - self._attrs["reason"] = self.coordinator.data.ground_stop.reason - elif sensor_type == "DEPART_DELAY": - self._attrs["minimum"] = self.coordinator.data.depart_delay.minimum - self._attrs["maximum"] = self.coordinator.data.depart_delay.maximum - self._attrs["trend"] = self.coordinator.data.depart_delay.trend - self._attrs["reason"] = self.coordinator.data.depart_delay.reason - elif sensor_type == "ARRIVE_DELAY": - self._attrs["minimum"] = self.coordinator.data.arrive_delay.minimum - self._attrs["maximum"] = self.coordinator.data.arrive_delay.maximum - self._attrs["trend"] = self.coordinator.data.arrive_delay.trend - self._attrs["reason"] = self.coordinator.data.arrive_delay.reason - elif sensor_type == "CLOSURE": - self._attrs["begin"] = self.coordinator.data.closure.start - self._attrs["end"] = self.coordinator.data.closure.end - return self._attrs + return self.entity_description.extra_state_attributes_fn(self.coordinator.data) diff --git 
a/homeassistant/components/faa_delays/config_flow.py b/homeassistant/components/faa_delays/config_flow.py index b2f7f69dd49..2f91ce9f797 100644 --- a/homeassistant/components/faa_delays/config_flow.py +++ b/homeassistant/components/faa_delays/config_flow.py @@ -1,5 +1,6 @@ """Config flow for FAA Delays integration.""" import logging +from typing import Any from aiohttp import ClientConnectionError import faadelays @@ -7,6 +8,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.const import CONF_ID +from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import aiohttp_client from .const import DOMAIN @@ -21,7 +23,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/faa_delays/coordinator.py b/homeassistant/components/faa_delays/coordinator.py index f2aefdada66..2f110cf7730 100644 --- a/homeassistant/components/faa_delays/coordinator.py +++ b/homeassistant/components/faa_delays/coordinator.py @@ -6,6 +6,7 @@ import logging from aiohttp import ClientConnectionError from faadelays import Airport +from homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -14,19 +15,18 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -class FAADataUpdateCoordinator(DataUpdateCoordinator): +class FAADataUpdateCoordinator(DataUpdateCoordinator[Airport]): """Class to manage fetching FAA API data from a single endpoint.""" - def __init__(self, hass, code): + def __init__(self, hass: HomeAssistant, code: str) -> None: """Initialize the coordinator.""" super().__init__( hass, _LOGGER, name=DOMAIN, 
update_interval=timedelta(minutes=1) ) self.session = aiohttp_client.async_get_clientsession(hass) self.data = Airport(code, self.session) - self.code = code - async def _async_update_data(self): + async def _async_update_data(self) -> Airport: try: async with asyncio.timeout(10): await self.data.update() diff --git a/homeassistant/components/faa_delays/strings.json b/homeassistant/components/faa_delays/strings.json index 92a9dafb4da..145c9e3ab34 100644 --- a/homeassistant/components/faa_delays/strings.json +++ b/homeassistant/components/faa_delays/strings.json @@ -17,5 +17,76 @@ "abort": { "already_configured": "This airport is already configured." } + }, + "entity": { + "binary_sensor": { + "ground_delay": { + "name": "Ground delay", + "state_attributes": { + "average": { + "name": "Average" + }, + "reason": { + "name": "Reason" + } + } + }, + "ground_stop": { + "name": "Ground stop", + "state_attributes": { + "endtime": { + "name": "End time" + }, + "reason": { + "name": "[%key:component::faa_delays::entity::binary_sensor::ground_delay::state_attributes::reason::name%]" + } + } + }, + "depart_delay": { + "name": "Departure delay", + "state_attributes": { + "minimum": { + "name": "Minimum" + }, + "maximum": { + "name": "Maximum" + }, + "trend": { + "name": "Trend" + }, + "reason": { + "name": "[%key:component::faa_delays::entity::binary_sensor::ground_delay::state_attributes::reason::name%]" + } + } + }, + "arrive_delay": { + "name": "Arrival delay", + "state_attributes": { + "minimum": { + "name": "[%key:component::faa_delays::entity::binary_sensor::depart_delay::state_attributes::minimum::name%]" + }, + "maximum": { + "name": "[%key:component::faa_delays::entity::binary_sensor::depart_delay::state_attributes::maximum::name%]" + }, + "trend": { + "name": "[%key:component::faa_delays::entity::binary_sensor::depart_delay::state_attributes::trend::name%]" + }, + "reason": { + "name": 
"[%key:component::faa_delays::entity::binary_sensor::ground_delay::state_attributes::reason::name%]" + } + } + }, + "closure": { + "name": "Closure", + "state_attributes": { + "begin": { + "name": "Begin" + }, + "end": { + "name": "End" + } + } + } + } } } diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index a149909e029..21ffca35962 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -18,7 +18,8 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_ON, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 PLATFORM_SCHEMA, @@ -77,8 +78,19 @@ ATTR_PRESET_MODES = "preset_modes" # mypy: disallow-any-generics -class NotValidPresetModeError(ValueError): - """Exception class when the preset_mode in not in the preset_modes list.""" +class NotValidPresetModeError(ServiceValidationError): + """Raised when the preset_mode is not in the preset_modes list.""" + + def __init__( + self, *args: object, translation_placeholders: dict[str, str] | None = None + ) -> None: + """Initialize the exception.""" + super().__init__( + *args, + translation_domain=DOMAIN, + translation_key="not_valid_preset_mode", + translation_placeholders=translation_placeholders, + ) @bind_hass @@ -107,7 +119,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ), vol.Optional(ATTR_PRESET_MODE): cv.string, }, - "async_turn_on", + "async_handle_turn_on_service", ) component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") @@ -156,7 +168,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( 
SERVICE_SET_PRESET_MODE, {vol.Required(ATTR_PRESET_MODE): cv.string}, - "async_set_preset_mode", + "async_handle_set_preset_mode_service", [FanEntityFeature.SET_SPEED, FanEntityFeature.PRESET_MODE], ) @@ -237,17 +249,30 @@ class FanEntity(ToggleEntity): """Set new preset mode.""" raise NotImplementedError() + @final + async def async_handle_set_preset_mode_service(self, preset_mode: str) -> None: + """Validate and set new preset mode.""" + self._valid_preset_mode_or_raise(preset_mode) + await self.async_set_preset_mode(preset_mode) + async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode) + @final + @callback def _valid_preset_mode_or_raise(self, preset_mode: str) -> None: """Raise NotValidPresetModeError on invalid preset_mode.""" preset_modes = self.preset_modes if not preset_modes or preset_mode not in preset_modes: + preset_modes_str: str = ", ".join(preset_modes or []) raise NotValidPresetModeError( f"The preset_mode {preset_mode} is not a valid preset_mode:" - f" {preset_modes}" + f" {preset_modes}", + translation_placeholders={ + "preset_mode": preset_mode, + "preset_modes": preset_modes_str, + }, ) def set_direction(self, direction: str) -> None: @@ -267,6 +292,18 @@ class FanEntity(ToggleEntity): """Turn on the fan.""" raise NotImplementedError() + @final + async def async_handle_turn_on_service( + self, + percentage: int | None = None, + preset_mode: str | None = None, + **kwargs: Any, + ) -> None: + """Validate and turn on the fan.""" + if preset_mode is not None: + self._valid_preset_mode_or_raise(preset_mode) + await self.async_turn_on(percentage, preset_mode, **kwargs) + async def async_turn_on( self, percentage: int | None = None, diff --git a/homeassistant/components/fan/strings.json b/homeassistant/components/fan/strings.json index 674dcc2b92e..aab714d3e07 100644 --- a/homeassistant/components/fan/strings.json +++ 
b/homeassistant/components/fan/strings.json @@ -144,5 +144,10 @@ "reverse": "Reverse" } } + }, + "exceptions": { + "not_valid_preset_mode": { + "message": "Preset mode {preset_mode} is not valid, valid preset modes are: {preset_modes}." + } } } diff --git a/homeassistant/components/fastdotcom/__init__.py b/homeassistant/components/fastdotcom/__init__.py index 50e0cb04869..2fe5b3ccafc 100644 --- a/homeassistant/components/fastdotcom/__init__.py +++ b/homeassistant/components/fastdotcom/__init__.py @@ -8,23 +8,18 @@ from typing import Any from fastdotcom import fast_com import voluptuous as vol -from homeassistant.const import CONF_SCAN_INTERVAL, Platform +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.const import CONF_SCAN_INTERVAL from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType -DOMAIN = "fastdotcom" -DATA_UPDATED = f"{DOMAIN}_data_updated" +from .const import CONF_MANUAL, DATA_UPDATED, DEFAULT_INTERVAL, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) -CONF_MANUAL = "manual" - -DEFAULT_INTERVAL = timedelta(hours=1) - CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -40,38 +35,61 @@ CONFIG_SCHEMA = vol.Schema( ) -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup_platform(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Fast.com component. 
(deprecated).""" + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config[DOMAIN], + ) + ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up the Fast.com component.""" - conf = config[DOMAIN] data = hass.data[DOMAIN] = SpeedtestData(hass) - if not conf[CONF_MANUAL]: - async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL]) + entry.async_on_unload( + async_track_time_interval(hass, data.update, timedelta(hours=DEFAULT_INTERVAL)) + ) + # Run an initial update to get a starting state + await data.update() - def update(service_call: ServiceCall | None = None) -> None: + async def update(service_call: ServiceCall | None = None) -> None: """Service call to manually update the data.""" - data.update() + await data.update() hass.services.async_register(DOMAIN, "speedtest", update) - hass.async_create_task( - async_load_platform(hass, Platform.SENSOR, DOMAIN, {}, config) + await hass.config_entries.async_forward_entry_setups( + entry, + PLATFORMS, ) return True +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Fast.com config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data.pop(DOMAIN) + return unload_ok + + class SpeedtestData: - """Get the latest data from fast.com.""" + """Get the latest data from Fast.com.""" def __init__(self, hass: HomeAssistant) -> None: """Initialize the data object.""" self.data: dict[str, Any] | None = None self._hass = hass - def update(self, now: datetime | None = None) -> None: + async def update(self, now: datetime | None = None) -> None: """Get the latest data from fast.com.""" - - _LOGGER.debug("Executing fast.com speedtest") - self.data = {"download": fast_com()} + _LOGGER.debug("Executing Fast.com speedtest") + fast_com_data = await self._hass.async_add_executor_job(fast_com) + self.data = {"download": 
fast_com_data} + _LOGGER.debug("Fast.com speedtest finished, with mbit/s: %s", fast_com_data) dispatcher_send(self._hass, DATA_UPDATED) diff --git a/homeassistant/components/fastdotcom/config_flow.py b/homeassistant/components/fastdotcom/config_flow.py new file mode 100644 index 00000000000..5ca35fd6802 --- /dev/null +++ b/homeassistant/components/fastdotcom/config_flow.py @@ -0,0 +1,50 @@ +"""Config flow for Fast.com integration.""" +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigFlow +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN +from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import DEFAULT_NAME, DOMAIN + + +class FastdotcomConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Fast.com.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the initial step.""" + if self._async_current_entries(): + return self.async_abort(reason="single_instance_allowed") + + if user_input is not None: + return self.async_create_entry(title=DEFAULT_NAME, data={}) + + return self.async_show_form(step_id="user") + + async def async_step_import( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle a flow initiated by configuration file.""" + async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Fast.com", + }, + ) + + return await self.async_step_user(user_input) diff --git a/homeassistant/components/fastdotcom/const.py b/homeassistant/components/fastdotcom/const.py new file mode 100644 index 00000000000..753825c4361 --- /dev/null +++ 
b/homeassistant/components/fastdotcom/const.py @@ -0,0 +1,15 @@ +"""Constants for the Fast.com integration.""" +import logging + +from homeassistant.const import Platform + +LOGGER = logging.getLogger(__package__) + +DOMAIN = "fastdotcom" +DATA_UPDATED = f"{DOMAIN}_data_updated" + +CONF_MANUAL = "manual" + +DEFAULT_NAME = "Fast.com" +DEFAULT_INTERVAL = 1 +PLATFORMS: list[Platform] = [Platform.SENSOR] diff --git a/homeassistant/components/fastdotcom/manifest.json b/homeassistant/components/fastdotcom/manifest.json index 73db5c0bf11..02fd3ade205 100644 --- a/homeassistant/components/fastdotcom/manifest.json +++ b/homeassistant/components/fastdotcom/manifest.json @@ -1,7 +1,8 @@ { "domain": "fastdotcom", "name": "Fast.com", - "codeowners": ["@rohankapoorcom"], + "codeowners": ["@rohankapoorcom", "@erwindouna"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/fastdotcom", "iot_class": "cloud_polling", "loggers": ["fastdotcom"], diff --git a/homeassistant/components/fastdotcom/sensor.py b/homeassistant/components/fastdotcom/sensor.py index b20b0213835..939ab4a40e5 100644 --- a/homeassistant/components/fastdotcom/sensor.py +++ b/homeassistant/components/fastdotcom/sensor.py @@ -8,29 +8,28 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfDataRate from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . 
import DATA_UPDATED, DOMAIN as FASTDOTCOM_DOMAIN +from .const import DATA_UPDATED, DOMAIN -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Fast.com sensor.""" - async_add_entities([SpeedtestSensor(hass.data[FASTDOTCOM_DOMAIN])]) + async_add_entities([SpeedtestSensor(entry.entry_id, hass.data[DOMAIN])]) # pylint: disable-next=hass-invalid-inheritance # needs fixing class SpeedtestSensor(RestoreEntity, SensorEntity): - """Implementation of a FAst.com sensor.""" + """Implementation of a Fast.com sensor.""" _attr_name = "Fast.com Download" _attr_device_class = SensorDeviceClass.DATA_RATE @@ -39,9 +38,10 @@ class SpeedtestSensor(RestoreEntity, SensorEntity): _attr_icon = "mdi:speedometer" _attr_should_poll = False - def __init__(self, speedtest_data: dict[str, Any]) -> None: + def __init__(self, entry_id: str, speedtest_data: dict[str, Any]) -> None: """Initialize the sensor.""" self._speedtest_data = speedtest_data + self._attr_unique_id = entry_id async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" diff --git a/homeassistant/components/fastdotcom/strings.json b/homeassistant/components/fastdotcom/strings.json index 705eada9387..d647250b423 100644 --- a/homeassistant/components/fastdotcom/strings.json +++ b/homeassistant/components/fastdotcom/strings.json @@ -1,4 +1,14 @@ { + "config": { + "step": { + "user": { + "description": "Do you want to start the setup? The initial setup will take about 30-40 seconds." 
+ } + }, + "abort": { + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" + } + }, "services": { "speedtest": { "name": "Speed test", diff --git a/homeassistant/components/fints/manifest.json b/homeassistant/components/fints/manifest.json index 821298434d9..063e612d35d 100644 --- a/homeassistant/components/fints/manifest.json +++ b/homeassistant/components/fints/manifest.json @@ -3,6 +3,7 @@ "name": "FinTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/fints", + "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["fints", "mt_940", "sepaxml"], "requirements": ["fints==3.1.0"] diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 3b961054544..fafe1fcf2bf 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -168,14 +168,13 @@ class FinTsClient: if not account_information: return False - if not account_information["type"]: - # bank does not support account types, use value from config - if ( - account_information["iban"] in self.account_config - or account_information["account_number"] in self.account_config - ): - return True - elif 1 <= account_information["type"] <= 9: + if 1 <= account_information["type"] <= 9: + return True + + if ( + account_information["iban"] in self.account_config + or account_information["account_number"] in self.account_config + ): return True return False @@ -189,14 +188,13 @@ class FinTsClient: if not account_information: return False - if not account_information["type"]: - # bank does not support account types, use value from config - if ( - account_information["iban"] in self.holdings_config - or account_information["account_number"] in self.holdings_config - ): - return True - elif 30 <= account_information["type"] <= 39: + if 30 <= account_information["type"] <= 39: + return True + + if ( + account_information["iban"] in self.holdings_config + 
or account_information["account_number"] in self.holdings_config + ): return True return False @@ -215,7 +213,11 @@ class FinTsClient: holdings_accounts.append(account) else: - _LOGGER.warning("Could not determine type of account %s", account.iban) + _LOGGER.warning( + "Could not determine type of account %s from %s", + account.iban, + self.client.user_id, + ) return balance_accounts, holdings_accounts diff --git a/homeassistant/components/fivem/strings.json b/homeassistant/components/fivem/strings.json index 2ffb401f8c0..abdef61fb28 100644 --- a/homeassistant/components/fivem/strings.json +++ b/homeassistant/components/fivem/strings.json @@ -6,6 +6,9 @@ "name": "[%key:common::config_flow::data::name%]", "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your FiveM server." } } }, diff --git a/homeassistant/components/fjaraskupan/fan.py b/homeassistant/components/fjaraskupan/fan.py index 142694a6bfb..ee989bb2ee0 100644 --- a/homeassistant/components/fjaraskupan/fan.py +++ b/homeassistant/components/fjaraskupan/fan.py @@ -131,11 +131,9 @@ class Fan(CoordinatorEntity[FjaraskupanCoordinator], FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if command := PRESET_TO_COMMAND.get(preset_mode): - async with self.coordinator.async_connect_and_update() as device: - await device.send_command(command) - else: - raise UnsupportedPreset(f"The preset {preset_mode} is unsupported") + command = PRESET_TO_COMMAND[preset_mode] + async with self.coordinator.async_connect_and_update() as device: + await device.send_command(command) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" diff --git a/homeassistant/components/flexit_bacnet/__init__.py b/homeassistant/components/flexit_bacnet/__init__.py new file mode 100644 index 00000000000..c9a0b332d93 --- /dev/null +++ 
b/homeassistant/components/flexit_bacnet/__init__.py @@ -0,0 +1,43 @@ +"""The Flexit Nordic (BACnet) integration.""" +from __future__ import annotations + +import asyncio.exceptions + +from flexit_bacnet import FlexitBACnet +from flexit_bacnet.bacnet import DecodingError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_DEVICE_ID, CONF_IP_ADDRESS, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import DOMAIN + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Flexit Nordic (BACnet) from a config entry.""" + + device = FlexitBACnet(entry.data[CONF_IP_ADDRESS], entry.data[CONF_DEVICE_ID]) + + try: + await device.update() + except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc: + raise ConfigEntryNotReady( + f"Timeout while connecting to {entry.data['address']}" + ) from exc + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = device + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/flexit_bacnet/climate.py b/homeassistant/components/flexit_bacnet/climate.py new file mode 100644 index 00000000000..28f4a6ae178 --- /dev/null +++ b/homeassistant/components/flexit_bacnet/climate.py @@ -0,0 +1,148 @@ +"""The Flexit Nordic (BACnet) integration.""" +import asyncio.exceptions +from typing import Any + +from flexit_bacnet import ( + VENTILATION_MODE_AWAY, + VENTILATION_MODE_HOME, + VENTILATION_MODE_STOP, + FlexitBACnet, +) +from flexit_bacnet.bacnet import DecodingError + +from 
homeassistant.components.climate import ( + PRESET_AWAY, + PRESET_BOOST, + PRESET_HOME, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ( + DOMAIN, + PRESET_TO_VENTILATION_MODE_MAP, + VENTILATION_TO_PRESET_MODE_MAP, +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_devices: AddEntitiesCallback, +) -> None: + """Set up the Flexit Nordic unit.""" + device = hass.data[DOMAIN][config_entry.entry_id] + + async_add_devices([FlexitClimateEntity(device)]) + + +class FlexitClimateEntity(ClimateEntity): + """Flexit air handling unit.""" + + _attr_name = None + + _attr_has_entity_name = True + + _attr_hvac_modes = [ + HVACMode.OFF, + HVACMode.FAN_ONLY, + ] + + _attr_preset_modes = [ + PRESET_AWAY, + PRESET_HOME, + PRESET_BOOST, + ] + + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + + _attr_target_temperature_step = PRECISION_WHOLE + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, device: FlexitBACnet) -> None: + """Initialize the unit.""" + self._device = device + self._attr_unique_id = device.serial_number + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, device.serial_number), + }, + name=device.device_name, + manufacturer="Flexit", + model="Nordic", + serial_number=device.serial_number, + ) + + async def async_update(self) -> None: + """Refresh unit state.""" + await self._device.update() + + @property + def current_temperature(self) -> float: + """Return the current temperature.""" + return 
self._device.room_temperature + + @property + def target_temperature(self) -> float: + """Return the temperature we try to reach.""" + if self._device.ventilation_mode == VENTILATION_MODE_AWAY: + return self._device.air_temp_setpoint_away + + return self._device.air_temp_setpoint_home + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + + try: + if self._device.ventilation_mode == VENTILATION_MODE_AWAY: + await self._device.set_air_temp_setpoint_away(temperature) + else: + await self._device.set_air_temp_setpoint_home(temperature) + except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc: + raise HomeAssistantError from exc + + @property + def preset_mode(self) -> str: + """Return the current preset mode, e.g., home, away, temp. + + Requires ClimateEntityFeature.PRESET_MODE. + """ + return VENTILATION_TO_PRESET_MODE_MAP[self._device.ventilation_mode] + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + ventilation_mode = PRESET_TO_VENTILATION_MODE_MAP[preset_mode] + + try: + await self._device.set_ventilation_mode(ventilation_mode) + except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc: + raise HomeAssistantError from exc + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. 
heat, cool mode.""" + if self._device.ventilation_mode == VENTILATION_MODE_STOP: + return HVACMode.OFF + + return HVACMode.FAN_ONLY + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + try: + if hvac_mode == HVACMode.OFF: + await self._device.set_ventilation_mode(VENTILATION_MODE_STOP) + else: + await self._device.set_ventilation_mode(VENTILATION_MODE_HOME) + except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc: + raise HomeAssistantError from exc diff --git a/homeassistant/components/flexit_bacnet/config_flow.py b/homeassistant/components/flexit_bacnet/config_flow.py new file mode 100644 index 00000000000..2c87dfc5b97 --- /dev/null +++ b/homeassistant/components/flexit_bacnet/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Flexit Nordic (BACnet) integration.""" +from __future__ import annotations + +import asyncio.exceptions +import logging +from typing import Any + +from flexit_bacnet import FlexitBACnet +from flexit_bacnet.bacnet import DecodingError +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_DEVICE_ID, CONF_IP_ADDRESS +from homeassistant.data_entry_flow import FlowResult + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_DEVICE_ID = 2 + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_IP_ADDRESS): str, + vol.Required(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): int, + } +) + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Flexit Nordic (BACnet).""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + device = FlexitBACnet( + user_input[CONF_IP_ADDRESS], user_input[CONF_DEVICE_ID] + ) + try: + await device.update() + except (asyncio.exceptions.TimeoutError, ConnectionError, 
DecodingError): + errors["base"] = "cannot_connect" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(device.serial_number) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=device.device_name, data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/flexit_bacnet/const.py b/homeassistant/components/flexit_bacnet/const.py new file mode 100644 index 00000000000..269a88c4cec --- /dev/null +++ b/homeassistant/components/flexit_bacnet/const.py @@ -0,0 +1,30 @@ +"""Constants for the Flexit Nordic (BACnet) integration.""" +from flexit_bacnet import ( + VENTILATION_MODE_AWAY, + VENTILATION_MODE_HIGH, + VENTILATION_MODE_HOME, + VENTILATION_MODE_STOP, +) + +from homeassistant.components.climate import ( + PRESET_AWAY, + PRESET_BOOST, + PRESET_HOME, + PRESET_NONE, +) + +DOMAIN = "flexit_bacnet" + +VENTILATION_TO_PRESET_MODE_MAP = { + VENTILATION_MODE_STOP: PRESET_NONE, + VENTILATION_MODE_AWAY: PRESET_AWAY, + VENTILATION_MODE_HOME: PRESET_HOME, + VENTILATION_MODE_HIGH: PRESET_BOOST, +} + +PRESET_TO_VENTILATION_MODE_MAP = { + PRESET_NONE: VENTILATION_MODE_STOP, + PRESET_AWAY: VENTILATION_MODE_AWAY, + PRESET_HOME: VENTILATION_MODE_HOME, + PRESET_BOOST: VENTILATION_MODE_HIGH, +} diff --git a/homeassistant/components/flexit_bacnet/manifest.json b/homeassistant/components/flexit_bacnet/manifest.json new file mode 100644 index 00000000000..d230e4ebb7a --- /dev/null +++ b/homeassistant/components/flexit_bacnet/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "flexit_bacnet", + "name": "Flexit Nordic (BACnet)", + "codeowners": ["@lellky", "@piotrbulinski"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": 
["flexit_bacnet==2.1.0"] +} diff --git a/homeassistant/components/flexit_bacnet/strings.json b/homeassistant/components/flexit_bacnet/strings.json new file mode 100644 index 00000000000..fd2725c6403 --- /dev/null +++ b/homeassistant/components/flexit_bacnet/strings.json @@ -0,0 +1,19 @@ +{ + "config": { + "step": { + "user": { + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]", + "device_id": "[%key:common::config_flow::data::device%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/flo/strings.json b/homeassistant/components/flo/strings.json index 627f562be7e..3444911fbd4 100644 --- a/homeassistant/components/flo/strings.json +++ b/homeassistant/components/flo/strings.json @@ -6,6 +6,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Flo device." } } }, diff --git a/homeassistant/components/foscam/strings.json b/homeassistant/components/foscam/strings.json index 35964ee4546..de22006b274 100644 --- a/homeassistant/components/foscam/strings.json +++ b/homeassistant/components/foscam/strings.json @@ -9,6 +9,9 @@ "password": "[%key:common::config_flow::data::password%]", "rtsp_port": "RTSP port", "stream": "Stream" + }, + "data_description": { + "host": "The hostname or IP address of your Foscam camera." 
} } }, diff --git a/homeassistant/components/freebox/alarm_control_panel.py b/homeassistant/components/freebox/alarm_control_panel.py index 52b7109045c..be3d88cf5b4 100644 --- a/homeassistant/components/freebox/alarm_control_panel.py +++ b/homeassistant/components/freebox/alarm_control_panel.py @@ -1,5 +1,4 @@ """Support for Freebox alarms.""" -import logging from typing import Any from homeassistant.components.alarm_control_panel import ( @@ -9,7 +8,7 @@ from homeassistant.components.alarm_control_panel import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMING, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, @@ -25,16 +24,14 @@ FREEBOX_TO_STATUS = { "alarm1_arming": STATE_ALARM_ARMING, "alarm2_arming": STATE_ALARM_ARMING, "alarm1_armed": STATE_ALARM_ARMED_AWAY, - "alarm2_armed": STATE_ALARM_ARMED_NIGHT, + "alarm2_armed": STATE_ALARM_ARMED_HOME, "alarm1_alert_timer": STATE_ALARM_TRIGGERED, "alarm2_alert_timer": STATE_ALARM_TRIGGERED, "alert": STATE_ALARM_TRIGGERED, + "idle": STATE_ALARM_DISARMED, } -_LOGGER = logging.getLogger(__name__) - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -76,63 +73,33 @@ class FreeboxAlarm(FreeboxHomeEntity, AlarmControlPanelEntity): self._command_state = self.get_command_id( node["type"]["endpoints"], "signal", "state" ) - self._set_features(self._router.home_devices[self._id]) + + self._attr_supported_features = ( + AlarmControlPanelEntityFeature.ARM_AWAY + | (AlarmControlPanelEntityFeature.ARM_HOME if self._command_arm_home else 0) + | AlarmControlPanelEntityFeature.TRIGGER + ) async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - if await self.set_home_endpoint_value(self._command_disarm): - self._set_state(STATE_ALARM_DISARMED) + await 
self.set_home_endpoint_value(self._command_disarm) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - if await self.set_home_endpoint_value(self._command_arm_away): - self._set_state(STATE_ALARM_ARMING) + await self.set_home_endpoint_value(self._command_arm_away) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - if await self.set_home_endpoint_value(self._command_arm_home): - self._set_state(STATE_ALARM_ARMING) + await self.set_home_endpoint_value(self._command_arm_home) async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command.""" - if await self.set_home_endpoint_value(self._command_trigger): - self._set_state(STATE_ALARM_TRIGGERED) + await self.set_home_endpoint_value(self._command_trigger) - async def async_update_signal(self): - """Update signal.""" - state = await self.get_home_endpoint_value(self._command_state) - if state: - self._set_state(state) - - def _set_features(self, node: dict[str, Any]) -> None: - """Add alarm features.""" - # Search if the arm home feature is present => has an "alarm2" endpoint - can_arm_home = False - for nodeid, local_node in self._router.home_devices.items(): - if nodeid == local_node["id"]: - alarm2 = next( - filter( - lambda x: (x["name"] == "alarm2" and x["ep_type"] == "signal"), - local_node["show_endpoints"], - ), - None, - ) - if alarm2: - can_arm_home = alarm2["value"] - break - - if can_arm_home: - self._attr_supported_features = ( - AlarmControlPanelEntityFeature.ARM_AWAY - | AlarmControlPanelEntityFeature.ARM_HOME - ) - - else: - self._attr_supported_features = AlarmControlPanelEntityFeature.ARM_AWAY - - def _set_state(self, state: str) -> None: + async def async_update(self) -> None: """Update state.""" - self._attr_state = FREEBOX_TO_STATUS.get(state) - if not self._attr_state: - self._attr_state = STATE_ALARM_DISARMED - self.async_write_ha_state() + state: str | None = 
await self.get_home_endpoint_value(self._command_state) + if state: + self._attr_state = FREEBOX_TO_STATUS.get(state) + else: + self._attr_state = None diff --git a/homeassistant/components/freebox/home_base.py b/homeassistant/components/freebox/home_base.py index 2cc1a5fcfe3..022528e5ea7 100644 --- a/homeassistant/components/freebox/home_base.py +++ b/homeassistant/components/freebox/home_base.py @@ -131,13 +131,14 @@ class FreeboxHomeEntity(Entity): def get_value(self, ep_type: str, name: str): """Get the value.""" node = next( - filter( - lambda x: (x["name"] == name and x["ep_type"] == ep_type), - self._node["show_endpoints"], + ( + endpoint + for endpoint in self._node["show_endpoints"] + if endpoint["name"] == name and endpoint["ep_type"] == ep_type ), None, ) - if not node: + if node is None: _LOGGER.warning( "The Freebox Home device has no node value for: %s/%s", ep_type, name ) diff --git a/homeassistant/components/freebox/router.py b/homeassistant/components/freebox/router.py index 6a73624a776..765761c43f2 100644 --- a/homeassistant/components/freebox/router.py +++ b/homeassistant/components/freebox/router.py @@ -4,9 +4,11 @@ from __future__ import annotations from collections.abc import Mapping from contextlib import suppress from datetime import datetime +import json import logging import os from pathlib import Path +import re from typing import Any from freebox_api import Freepybox @@ -36,6 +38,20 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +def is_json(json_str): + """Validate if a String is a JSON value or not.""" + try: + json.loads(json_str) + return True + except (ValueError, TypeError) as err: + _LOGGER.error( + "Failed to parse JSON '%s', error '%s'", + json_str, + err, + ) + return False + + async def get_api(hass: HomeAssistant, host: str) -> Freepybox: """Get the Freebox API.""" freebox_path = Store(hass, STORAGE_VERSION, STORAGE_KEY).path @@ -69,6 +85,7 @@ class FreeboxRouter: self._sw_v: str = 
freebox_config["firmware_version"] self._attrs: dict[str, Any] = {} + self.supports_hosts = True self.devices: dict[str, dict[str, Any]] = {} self.disks: dict[int, dict[str, Any]] = {} self.supports_raid = True @@ -89,7 +106,32 @@ class FreeboxRouter: async def update_device_trackers(self) -> None: """Update Freebox devices.""" new_device = False - fbx_devices: list[dict[str, Any]] = await self._api.lan.get_hosts_list() + + fbx_devices: list[dict[str, Any]] = [] + + # Access to Host list not available in bridge mode, API return error_code 'nodev' + if self.supports_hosts: + try: + fbx_devices = await self._api.lan.get_hosts_list() + except HttpRequestError as err: + if ( + ( + matcher := re.search( + r"Request failed \(APIResponse: (.+)\)", str(err) + ) + ) + and is_json(json_str := matcher.group(1)) + and (json_resp := json.loads(json_str)).get("error_code") == "nodev" + ): + # No need to retry, Host list not available + self.supports_hosts = False + _LOGGER.debug( + "Host list is not available using bridge mode (%s)", + json_resp.get("msg"), + ) + + else: + raise err # Adds the Freebox itself fbx_devices.append( diff --git a/homeassistant/components/freebox/strings.json b/homeassistant/components/freebox/strings.json index 5c4143b4562..eaa56a38da1 100644 --- a/homeassistant/components/freebox/strings.json +++ b/homeassistant/components/freebox/strings.json @@ -5,6 +5,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Freebox router." 
} }, "link": { diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 7cbb10a236b..5eed2f59fc4 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -26,6 +26,9 @@ "port": "[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router." } } }, diff --git a/homeassistant/components/fritzbox/binary_sensor.py b/homeassistant/components/fritzbox/binary_sensor.py index 5d30362627e..2460635351e 100644 --- a/homeassistant/components/fritzbox/binary_sensor.py +++ b/homeassistant/components/fritzbox/binary_sensor.py @@ -14,12 +14,11 @@ from homeassistant.components.binary_sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import FritzBoxDeviceEntity -from .const import CONF_COORDINATOR, DOMAIN as FRITZBOX_DOMAIN -from .coordinator import FritzboxDataUpdateCoordinator +from .common import get_coordinator from .model import FritzEntityDescriptionMixinBase @@ -68,18 +67,23 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome binary sensor from ConfigEntry.""" - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - [ + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( FritzboxBinarySensor(coordinator, ain, description) - for ain, device in coordinator.data.devices.items() + for ain in coordinator.new_devices for description in BINARY_SENSOR_TYPES - if description.suitable(device) - ] - ) + if description.suitable(coordinator.data.devices[ain]) + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzboxBinarySensor(FritzBoxDeviceEntity, BinarySensorEntity): diff --git a/homeassistant/components/fritzbox/button.py b/homeassistant/components/fritzbox/button.py index cc5457fb8a2..732c41bfb7d 100644 --- a/homeassistant/components/fritzbox/button.py +++ b/homeassistant/components/fritzbox/button.py @@ -3,25 +3,33 @@ from pyfritzhome.devicetypes import FritzhomeTemplate from homeassistant.components.button import ButtonEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FritzboxDataUpdateCoordinator, FritzBoxEntity -from .const import CONF_COORDINATOR, DOMAIN as FRITZBOX_DOMAIN +from . import FritzBoxEntity +from .common import get_coordinator +from .const import DOMAIN async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome template from ConfigEntry.""" - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - [FritzBoxTemplate(coordinator, ain) for ain in coordinator.data.templates] - ) + @callback + def _add_entities() -> None: + """Add templates.""" + if not coordinator.new_templates: + return + async_add_entities( + FritzBoxTemplate(coordinator, ain) for ain in coordinator.new_templates + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzBoxTemplate(FritzBoxEntity, ButtonEntity): @@ -37,7 +45,7 @@ class FritzBoxTemplate(FritzBoxEntity, ButtonEntity): """Return device specific attributes.""" return DeviceInfo( name=self.data.name, - identifiers={(FRITZBOX_DOMAIN, self.ain)}, + identifiers={(DOMAIN, self.ain)}, configuration_url=self.coordinator.configuration_url, manufacturer="AVM", model="SmartHome Template", diff --git a/homeassistant/components/fritzbox/climate.py b/homeassistant/components/fritzbox/climate.py index 7c846789637..70359d9b2af 100644 --- a/homeassistant/components/fritzbox/climate.py +++ b/homeassistant/components/fritzbox/climate.py @@ -18,17 +18,16 @@ from homeassistant.const import ( PRECISION_HALVES, UnitOfTemperature, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity +from . 
import FritzBoxDeviceEntity +from .common import get_coordinator from .const import ( ATTR_STATE_BATTERY_LOW, ATTR_STATE_HOLIDAY_MODE, ATTR_STATE_SUMMER_MODE, ATTR_STATE_WINDOW_OPEN, - CONF_COORDINATOR, - DOMAIN as FRITZBOX_DOMAIN, ) from .model import ClimateExtraAttributes @@ -50,17 +49,22 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome thermostat from ConfigEntry.""" - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - [ + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( FritzboxThermostat(coordinator, ain) - for ain, device in coordinator.data.devices.items() - if device.has_thermostat - ] - ) + for ain in coordinator.new_devices + if coordinator.data.devices[ain].has_thermostat + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): diff --git a/homeassistant/components/fritzbox/common.py b/homeassistant/components/fritzbox/common.py new file mode 100644 index 00000000000..ab87a51f9ce --- /dev/null +++ b/homeassistant/components/fritzbox/common.py @@ -0,0 +1,16 @@ +"""Common functions for fritzbox integration.""" + +from homeassistant.core import HomeAssistant + +from .const import CONF_COORDINATOR, DOMAIN +from .coordinator import FritzboxDataUpdateCoordinator + + +def get_coordinator( + hass: HomeAssistant, config_entry_id: str +) -> FritzboxDataUpdateCoordinator: + """Get coordinator for given config entry id.""" + coordinator: FritzboxDataUpdateCoordinator = hass.data[DOMAIN][config_entry_id][ + CONF_COORDINATOR + ] + return coordinator diff --git a/homeassistant/components/fritzbox/coordinator.py 
b/homeassistant/components/fritzbox/coordinator.py index 194825e602f..f6d210e367a 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -37,6 +37,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.fritz: Fritzhome = hass.data[DOMAIN][self.entry.entry_id][CONF_CONNECTIONS] self.configuration_url = self.fritz.get_prefixed_host() self.has_templates = has_templates + self.new_devices: set[str] = set() + self.new_templates: set[str] = set() super().__init__( hass, @@ -45,6 +47,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat update_interval=timedelta(seconds=30), ) + self.data = FritzboxCoordinatorData({}, {}) + def _update_fritz_devices(self) -> FritzboxCoordinatorData: """Update all fritzbox device data.""" try: @@ -87,6 +91,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat for template in templates: template_data[template.ain] = template + self.new_devices = device_data.keys() - self.data.devices.keys() + self.new_templates = template_data.keys() - self.data.templates.keys() + return FritzboxCoordinatorData(devices=device_data, templates=template_data) async def _async_update_data(self) -> FritzboxCoordinatorData: diff --git a/homeassistant/components/fritzbox/cover.py b/homeassistant/components/fritzbox/cover.py index df3b1562f9b..7d27356fdf9 100644 --- a/homeassistant/components/fritzbox/cover.py +++ b/homeassistant/components/fritzbox/cover.py @@ -10,26 +10,33 @@ from homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity -from .const import CONF_COORDINATOR, DOMAIN as FRITZBOX_DOMAIN +from . 
import FritzBoxDeviceEntity +from .common import get_coordinator async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome cover from ConfigEntry.""" - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - FritzboxCover(coordinator, ain) - for ain, device in coordinator.data.devices.items() - if device.has_blind - ) + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( + FritzboxCover(coordinator, ain) + for ain in coordinator.new_devices + if coordinator.data.devices[ain].has_blind + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzboxCover(FritzBoxDeviceEntity, CoverEntity): diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index f83dd454592..d31ccd180c4 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -13,17 +13,12 @@ from homeassistant.components.light import ( LightEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity -from .const import ( - COLOR_MODE, - COLOR_TEMP_MODE, - CONF_COORDINATOR, - DOMAIN as FRITZBOX_DOMAIN, - LOGGER, -) +from .common import get_coordinator +from .const import COLOR_MODE, COLOR_TEMP_MODE, LOGGER SUPPORTED_COLOR_MODES = {ColorMode.COLOR_TEMP, ColorMode.HS} @@ -32,31 +27,27 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome light from ConfigEntry.""" - entities: list[FritzboxLight] = [] - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - for ain, device in coordinator.data.devices.items(): - if not device.has_lightbulb: - continue - - supported_color_temps = await hass.async_add_executor_job( - device.get_color_temps - ) - - supported_colors = await hass.async_add_executor_job(device.get_colors) - - entities.append( + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( FritzboxLight( coordinator, ain, - supported_colors, - supported_color_temps, + device.get_colors(), + device.get_color_temps(), ) + for ain in coordinator.new_devices + if (device := coordinator.data.devices[ain]).has_lightbulb ) - async_add_entities(entities) + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzboxLight(FritzBoxDeviceEntity, LightEntity): diff --git a/homeassistant/components/fritzbox/sensor.py b/homeassistant/components/fritzbox/sensor.py index 013c1dfc7b5..1e5d7754934 100644 --- a/homeassistant/components/fritzbox/sensor.py +++ b/homeassistant/components/fritzbox/sensor.py @@ -25,13 +25,13 @@ from homeassistant.const import ( UnitOfPower, UnitOfTemperature, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utc_from_timestamp from . import FritzBoxDeviceEntity -from .const import CONF_COORDINATOR, DOMAIN as FRITZBOX_DOMAIN +from .common import get_coordinator from .model import FritzEntityDescriptionMixinBase @@ -212,16 +212,23 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome sensor from ConfigEntry.""" - coordinator = hass.data[FRITZBOX_DOMAIN][entry.entry_id][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - [ + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( FritzBoxSensor(coordinator, ain, description) - for ain, device in coordinator.data.devices.items() + for ain in coordinator.new_devices for description in SENSOR_TYPES - if description.suitable(device) - ] - ) + if description.suitable(coordinator.data.devices[ain]) + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzBoxSensor(FritzBoxDeviceEntity, SensorEntity): diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index d5607aa3090..f4d2fe3670e 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -8,6 +8,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router." 
} }, "confirm": { diff --git a/homeassistant/components/fritzbox/switch.py b/homeassistant/components/fritzbox/switch.py index 5eee3019633..617a5242c5b 100644 --- a/homeassistant/components/fritzbox/switch.py +++ b/homeassistant/components/fritzbox/switch.py @@ -5,28 +5,33 @@ from typing import Any from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity -from .const import CONF_COORDINATOR, DOMAIN as FRITZBOX_DOMAIN +from . import FritzBoxDeviceEntity +from .common import get_coordinator async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the FRITZ!SmartHome switch from ConfigEntry.""" - coordinator: FritzboxDataUpdateCoordinator = hass.data[FRITZBOX_DOMAIN][ - entry.entry_id - ][CONF_COORDINATOR] + coordinator = get_coordinator(hass, entry.entry_id) - async_add_entities( - [ + @callback + def _add_entities() -> None: + """Add devices.""" + if not coordinator.new_devices: + return + async_add_entities( FritzboxSwitch(coordinator, ain) - for ain, device in coordinator.data.devices.items() - if device.has_switch - ] - ) + for ain in coordinator.new_devices + if coordinator.data.devices[ain].has_switch + ) + + entry.async_on_unload(coordinator.async_add_listener(_add_entities)) + + _add_entities() class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity): diff --git a/homeassistant/components/fritzbox_callmonitor/strings.json b/homeassistant/components/fritzbox_callmonitor/strings.json index 89f049bfbe9..ac36942eec2 100644 --- a/homeassistant/components/fritzbox_callmonitor/strings.json +++ b/homeassistant/components/fritzbox_callmonitor/strings.json @@ -8,6 +8,9 @@ "port": 
"[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router." } }, "phonebook": { diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 4060731b21c..18f35de8336 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -1,7 +1,9 @@ """Constants for the Fronius integration.""" +from enum import StrEnum from typing import Final, NamedTuple, TypedDict from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.typing import StateType DOMAIN: Final = "fronius" @@ -25,3 +27,97 @@ class FroniusDeviceInfo(NamedTuple): device_info: DeviceInfo solar_net_id: SolarNetId unique_id: str + + +class InverterStatusCodeOption(StrEnum): + """Status codes for Fronius inverters.""" + + # these are keys for state translations - so snake_case is used + STARTUP = "startup" + RUNNING = "running" + STANDBY = "standby" + BOOTLOADING = "bootloading" + ERROR = "error" + IDLE = "idle" + READY = "ready" + SLEEPING = "sleeping" + UNKNOWN = "unknown" + INVALID = "invalid" + + +_INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { + 0: InverterStatusCodeOption.STARTUP, + 1: InverterStatusCodeOption.STARTUP, + 2: InverterStatusCodeOption.STARTUP, + 3: InverterStatusCodeOption.STARTUP, + 4: InverterStatusCodeOption.STARTUP, + 5: InverterStatusCodeOption.STARTUP, + 6: InverterStatusCodeOption.STARTUP, + 7: InverterStatusCodeOption.RUNNING, + 8: InverterStatusCodeOption.STANDBY, + 9: InverterStatusCodeOption.BOOTLOADING, + 10: InverterStatusCodeOption.ERROR, + 11: InverterStatusCodeOption.IDLE, + 12: InverterStatusCodeOption.READY, + 13: InverterStatusCodeOption.SLEEPING, + 255: InverterStatusCodeOption.UNKNOWN, +} + + +def get_inverter_status_message(code: StateType) -> 
InverterStatusCodeOption: + """Return a status message for a given status code.""" + return _INVERTER_STATUS_CODES.get(code, InverterStatusCodeOption.INVALID) # type: ignore[arg-type] + + +class MeterLocationCodeOption(StrEnum): + """Meter location codes for Fronius meters.""" + + # these are keys for state translations - so snake_case is used + FEED_IN = "feed_in" + CONSUMPTION_PATH = "consumption_path" + GENERATOR = "external_generator" + EXT_BATTERY = "external_battery" + SUBLOAD = "subload" + + +def get_meter_location_description(code: StateType) -> MeterLocationCodeOption | None: + """Return a location_description for a given location code.""" + match int(code): # type: ignore[arg-type] + case 0: + return MeterLocationCodeOption.FEED_IN + case 1: + return MeterLocationCodeOption.CONSUMPTION_PATH + case 3: + return MeterLocationCodeOption.GENERATOR + case 4: + return MeterLocationCodeOption.EXT_BATTERY + case _ as _code if 256 <= _code <= 511: + return MeterLocationCodeOption.SUBLOAD + return None + + +class OhmPilotStateCodeOption(StrEnum): + """OhmPilot state codes for Fronius inverters.""" + + # these are keys for state translations - so snake_case is used + UP_AND_RUNNING = "up_and_running" + KEEP_MINIMUM_TEMPERATURE = "keep_minimum_temperature" + LEGIONELLA_PROTECTION = "legionella_protection" + CRITICAL_FAULT = "critical_fault" + FAULT = "fault" + BOOST_MODE = "boost_mode" + + +_OHMPILOT_STATE_CODES: Final[dict[int, OhmPilotStateCodeOption]] = { + 0: OhmPilotStateCodeOption.UP_AND_RUNNING, + 1: OhmPilotStateCodeOption.KEEP_MINIMUM_TEMPERATURE, + 2: OhmPilotStateCodeOption.LEGIONELLA_PROTECTION, + 3: OhmPilotStateCodeOption.CRITICAL_FAULT, + 4: OhmPilotStateCodeOption.FAULT, + 5: OhmPilotStateCodeOption.BOOST_MODE, +} + + +def get_ohmpilot_state_message(code: StateType) -> OhmPilotStateCodeOption | None: + """Return a status message for a given status code.""" + return _OHMPILOT_STATE_CODES.get(code) # type: ignore[arg-type] diff --git 
a/homeassistant/components/fronius/coordinator.py b/homeassistant/components/fronius/coordinator.py index 94fd5f256aa..fcf9ce0a389 100644 --- a/homeassistant/components/fronius/coordinator.py +++ b/homeassistant/components/fronius/coordinator.py @@ -49,8 +49,10 @@ class FroniusCoordinatorBase( """Set up the FroniusCoordinatorBase class.""" self._failed_update_count = 0 self.solar_net = solar_net - # unregistered_keys are used to create entities in platform module - self.unregistered_keys: dict[SolarNetId, set[str]] = {} + # unregistered_descriptors are used to create entities in platform module + self.unregistered_descriptors: dict[ + SolarNetId, list[FroniusSensorEntityDescription] + ] = {} super().__init__(*args, update_interval=self.default_interval, **kwargs) @abstractmethod @@ -73,11 +75,11 @@ class FroniusCoordinatorBase( self.update_interval = self.default_interval for solar_net_id in data: - if solar_net_id not in self.unregistered_keys: + if solar_net_id not in self.unregistered_descriptors: # id seen for the first time - self.unregistered_keys[solar_net_id] = { - desc.key for desc in self.valid_descriptions - } + self.unregistered_descriptors[ + solar_net_id + ] = self.valid_descriptions.copy() return data @callback @@ -92,22 +94,34 @@ class FroniusCoordinatorBase( """ @callback - def _add_entities_for_unregistered_keys() -> None: + def _add_entities_for_unregistered_descriptors() -> None: """Add entities for keys seen for the first time.""" - new_entities: list = [] + new_entities: list[_FroniusEntityT] = [] for solar_net_id, device_data in self.data.items(): - for key in self.unregistered_keys[solar_net_id].intersection( - device_data - ): - if device_data[key]["value"] is None: + remaining_unregistered_descriptors = [] + for description in self.unregistered_descriptors[solar_net_id]: + key = description.response_key or description.key + if key not in device_data: + remaining_unregistered_descriptors.append(description) continue - 
new_entities.append(entity_constructor(self, key, solar_net_id)) - self.unregistered_keys[solar_net_id].remove(key) + if device_data[key]["value"] is None: + remaining_unregistered_descriptors.append(description) + continue + new_entities.append( + entity_constructor( + coordinator=self, + description=description, + solar_net_id=solar_net_id, + ) + ) + self.unregistered_descriptors[ + solar_net_id + ] = remaining_unregistered_descriptors async_add_entities(new_entities) - _add_entities_for_unregistered_keys() + _add_entities_for_unregistered_descriptors() self.solar_net.cleanup_callbacks.append( - self.async_add_listener(_add_entities_for_unregistered_keys) + self.async_add_listener(_add_entities_for_unregistered_descriptors) ) diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index f11855ce7e2..f058a25a044 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -1,6 +1,7 @@ """Support for Fronius devices.""" from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Final @@ -30,7 +31,16 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, SOLAR_NET_DISCOVERY_NEW +from .const import ( + DOMAIN, + SOLAR_NET_DISCOVERY_NEW, + InverterStatusCodeOption, + MeterLocationCodeOption, + OhmPilotStateCodeOption, + get_inverter_status_message, + get_meter_location_description, + get_ohmpilot_state_message, +) if TYPE_CHECKING: from . import FroniusSolarNet @@ -102,6 +112,8 @@ class FroniusSensorEntityDescription(SensorEntityDescription): # Gen24 devices may report 0 for total energy while doing firmware updates. # Handling such values shall mitigate spikes in delta calculations. 
invalid_when_falsy: bool = False + response_key: str | None = None + value_fn: Callable[[StateType], StateType] | None = None INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ @@ -198,6 +210,15 @@ INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ FroniusSensorEntityDescription( key="status_code", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + FroniusSensorEntityDescription( + key="status_message", + response_key="status_code", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.ENUM, + options=[opt.value for opt in InverterStatusCodeOption], + value_fn=get_inverter_status_message, ), FroniusSensorEntityDescription( key="led_state", @@ -306,6 +327,15 @@ METER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ FroniusSensorEntityDescription( key="meter_location", entity_category=EntityCategory.DIAGNOSTIC, + value_fn=int, # type: ignore[arg-type] + ), + FroniusSensorEntityDescription( + key="meter_location_description", + response_key="meter_location", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.ENUM, + options=[opt.value for opt in MeterLocationCodeOption], + value_fn=get_meter_location_description, ), FroniusSensorEntityDescription( key="power_apparent_phase_1", @@ -495,7 +525,11 @@ OHMPILOT_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ ), FroniusSensorEntityDescription( key="state_message", + response_key="state_code", entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.ENUM, + options=[opt.value for opt in OhmPilotStateCodeOption], + value_fn=get_ohmpilot_state_message, ), ] @@ -630,24 +664,22 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEn """Defines a Fronius coordinator entity.""" entity_description: FroniusSensorEntityDescription - entity_descriptions: list[FroniusSensorEntityDescription] _attr_has_entity_name = True def __init__( 
self, coordinator: FroniusCoordinatorBase, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius meter sensor.""" super().__init__(coordinator) - self.entity_description = next( - desc for desc in self.entity_descriptions if desc.key == key - ) + self.entity_description = description + self.response_key = description.response_key or description.key self.solar_net_id = solar_net_id self._attr_native_value = self._get_entity_value() - self._attr_translation_key = self.entity_description.key + self._attr_translation_key = description.key def _device_data(self) -> dict[str, Any]: """Extract information for SolarNet device from coordinator data.""" @@ -655,13 +687,13 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEn def _get_entity_value(self) -> Any: """Extract entity value from coordinator. Raises KeyError if not included in latest update.""" - new_value = self.coordinator.data[self.solar_net_id][ - self.entity_description.key - ]["value"] + new_value = self.coordinator.data[self.solar_net_id][self.response_key]["value"] if new_value is None: return self.entity_description.default_value if self.entity_description.invalid_when_falsy and not new_value: return None + if self.entity_description.value_fn is not None: + return self.entity_description.value_fn(new_value) if isinstance(new_value, float): return round(new_value, 4) return new_value @@ -681,54 +713,54 @@ class _FroniusSensorEntity(CoordinatorEntity["FroniusCoordinatorBase"], SensorEn class InverterSensor(_FroniusSensorEntity): """Defines a Fronius inverter device sensor entity.""" - entity_descriptions = INVERTER_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusInverterUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius inverter sensor.""" - super().__init__(coordinator, key, solar_net_id) + 
super().__init__(coordinator, description, solar_net_id) # device_info created in __init__ from a `GetInverterInfo` request self._attr_device_info = coordinator.inverter_info.device_info - self._attr_unique_id = f"{coordinator.inverter_info.unique_id}-{key}" + self._attr_unique_id = ( + f"{coordinator.inverter_info.unique_id}-{description.key}" + ) class LoggerSensor(_FroniusSensorEntity): """Defines a Fronius logger device sensor entity.""" - entity_descriptions = LOGGER_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusLoggerUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius meter sensor.""" - super().__init__(coordinator, key, solar_net_id) + super().__init__(coordinator, description, solar_net_id) logger_data = self._device_data() # Logger device is already created in FroniusSolarNet._create_solar_net_device self._attr_device_info = coordinator.solar_net.system_device_info - self._attr_native_unit_of_measurement = logger_data[key].get("unit") - self._attr_unique_id = f'{logger_data["unique_identifier"]["value"]}-{key}' + self._attr_native_unit_of_measurement = logger_data[self.response_key].get( + "unit" + ) + self._attr_unique_id = ( + f'{logger_data["unique_identifier"]["value"]}-{description.key}' + ) class MeterSensor(_FroniusSensorEntity): """Defines a Fronius meter device sensor entity.""" - entity_descriptions = METER_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusMeterUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius meter sensor.""" - super().__init__(coordinator, key, solar_net_id) + super().__init__(coordinator, description, solar_net_id) meter_data = self._device_data() # S0 meters connected directly to inverters respond "n.a." 
as serial number # `model` contains the inverter id: "S0 Meter at inverter 1" @@ -745,22 +777,20 @@ class MeterSensor(_FroniusSensorEntity): name=meter_data["model"]["value"], via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id), ) - self._attr_unique_id = f"{meter_uid}-{key}" + self._attr_unique_id = f"{meter_uid}-{description.key}" class OhmpilotSensor(_FroniusSensorEntity): """Defines a Fronius Ohmpilot sensor entity.""" - entity_descriptions = OHMPILOT_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusOhmpilotUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius meter sensor.""" - super().__init__(coordinator, key, solar_net_id) + super().__init__(coordinator, description, solar_net_id) device_data = self._device_data() self._attr_device_info = DeviceInfo( @@ -771,45 +801,41 @@ class OhmpilotSensor(_FroniusSensorEntity): sw_version=device_data["software"]["value"], via_device=(DOMAIN, coordinator.solar_net.solar_net_device_id), ) - self._attr_unique_id = f'{device_data["serial"]["value"]}-{key}' + self._attr_unique_id = f'{device_data["serial"]["value"]}-{description.key}' class PowerFlowSensor(_FroniusSensorEntity): """Defines a Fronius power flow sensor entity.""" - entity_descriptions = POWER_FLOW_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusPowerFlowUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius power flow sensor.""" - super().__init__(coordinator, key, solar_net_id) + super().__init__(coordinator, description, solar_net_id) # SolarNet device is already created in FroniusSolarNet._create_solar_net_device self._attr_device_info = coordinator.solar_net.system_device_info self._attr_unique_id = ( - f"{coordinator.solar_net.solar_net_device_id}-power_flow-{key}" + f"{coordinator.solar_net.solar_net_device_id}-power_flow-{description.key}" ) class 
StorageSensor(_FroniusSensorEntity): """Defines a Fronius storage device sensor entity.""" - entity_descriptions = STORAGE_ENTITY_DESCRIPTIONS - def __init__( self, coordinator: FroniusStorageUpdateCoordinator, - key: str, + description: FroniusSensorEntityDescription, solar_net_id: str, ) -> None: """Set up an individual Fronius storage sensor.""" - super().__init__(coordinator, key, solar_net_id) + super().__init__(coordinator, description, solar_net_id) storage_data = self._device_data() - self._attr_unique_id = f'{storage_data["serial"]["value"]}-{key}' + self._attr_unique_id = f'{storage_data["serial"]["value"]}-{description.key}' self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, storage_data["serial"]["value"])}, manufacturer=storage_data["manufacturer"]["value"], diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 4a0f96ed8e6..de066704644 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -66,6 +66,21 @@ "status_code": { "name": "Status code" }, + "status_message": { + "name": "Status message", + "state": { + "startup": "Startup", + "running": "Running", + "standby": "Standby", + "bootloading": "Bootloading", + "error": "Error", + "idle": "Idle", + "ready": "Ready", + "sleeping": "Sleeping", + "unknown": "Unknown", + "invalid": "Invalid" + } + }, "led_state": { "name": "LED state" }, @@ -114,6 +129,16 @@ "meter_location": { "name": "Meter location" }, + "meter_location_description": { + "name": "Meter location description", + "state": { + "feed_in": "Grid interconnection point", + "consumption_path": "Consumption path", + "external_generator": "External generator", + "external_battery": "External battery", + "subload": "Subload" + } + }, "power_apparent_phase_1": { "name": "Apparent power phase 1" }, @@ -193,7 +218,15 @@ "name": "State code" }, "state_message": { - "name": "State message" + "name": "State message", + "state": { 
+ "up_and_running": "Up and running", + "keep_minimum_temperature": "Keep minimum temperature", + "legionella_protection": "Legionella protection", + "critical_fault": "Critical fault", + "fault": "Fault", + "boost_mode": "Boost mode" + } }, "meter_mode": { "name": "Meter mode" diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 469deab23e1..b6668383b54 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20231030.2"] + "requirements": ["home-assistant-frontend==20231130.0"] } diff --git a/homeassistant/components/frontend/storage.py b/homeassistant/components/frontend/storage.py index 82f169dc6c9..91646dcb745 100644 --- a/homeassistant/components/frontend/storage.py +++ b/homeassistant/components/frontend/storage.py @@ -1,7 +1,7 @@ """API for persistent storage for the frontend.""" from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine from functools import wraps from typing import Any @@ -50,12 +50,19 @@ async def async_user_store( return store, data[user_id] -def with_store(orig_func: Callable) -> Callable: +def with_store( + orig_func: Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any], Store, dict[str, Any]], + Coroutine[Any, Any, None], + ], +) -> Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None] +]: """Decorate function to provide data.""" @wraps(orig_func) async def with_store_func( - hass: HomeAssistant, connection: ActiveConnection, msg: dict + hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Provide user specific data and store to function.""" user_id = connection.user.id diff --git 
a/homeassistant/components/frontier_silicon/strings.json b/homeassistant/components/frontier_silicon/strings.json index a10c3f535a1..03d9f28c016 100644 --- a/homeassistant/components/frontier_silicon/strings.json +++ b/homeassistant/components/frontier_silicon/strings.json @@ -5,10 +5,13 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Frontier Silicon device." } }, "device_config": { - "title": "Device Configuration", + "title": "Device configuration", "description": "The pin can be found via 'MENU button > Main Menu > System setting > Network > NetRemote PIN setup'", "data": { "pin": "[%key:common::config_flow::data::pin%]" diff --git a/homeassistant/components/fully_kiosk/config_flow.py b/homeassistant/components/fully_kiosk/config_flow.py index 7d744214d93..4f9dadd6901 100644 --- a/homeassistant/components/fully_kiosk/config_flow.py +++ b/homeassistant/components/fully_kiosk/config_flow.py @@ -12,7 +12,13 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_SSL, + CONF_VERIFY_SSL, +) from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac @@ -31,13 +37,19 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self._discovered_device_info: dict[str, Any] = {} async def _create_entry( - self, host: str, user_input: dict[str, Any], errors: dict[str, str] + self, + host: str, + user_input: dict[str, Any], + errors: dict[str, str], + description_placeholders: dict[str, str] | Any = None, ) -> FlowResult | None: fully = FullyKiosk( async_get_clientsession(self.hass), 
host, DEFAULT_PORT, user_input[CONF_PASSWORD], + use_ssl=user_input[CONF_SSL], + verify_ssl=user_input[CONF_VERIFY_SSL], ) try: @@ -50,10 +62,12 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): ) as error: LOGGER.debug(error.args, exc_info=True) errors["base"] = "cannot_connect" + description_placeholders["error_detail"] = str(error.args) return None except Exception as error: # pylint: disable=broad-except LOGGER.exception("Unexpected exception: %s", error) errors["base"] = "unknown" + description_placeholders["error_detail"] = str(error.args) return None await self.async_set_unique_id(device_info["deviceID"], raise_on_progress=False) @@ -64,6 +78,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): CONF_HOST: host, CONF_PASSWORD: user_input[CONF_PASSWORD], CONF_MAC: format_mac(device_info["Mac"]), + CONF_SSL: user_input[CONF_SSL], + CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL], }, ) @@ -72,8 +88,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): ) -> FlowResult: """Handle the initial step.""" errors: dict[str, str] = {} + placeholders: dict[str, str] = {} if user_input is not None: - result = await self._create_entry(user_input[CONF_HOST], user_input, errors) + result = await self._create_entry( + user_input[CONF_HOST], user_input, errors, placeholders + ) if result: return result @@ -83,8 +102,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): { vol.Required(CONF_HOST): str, vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=False): bool, + vol.Optional(CONF_VERIFY_SSL, default=False): bool, } ), + description_placeholders=placeholders, errors=errors, ) @@ -127,6 +149,8 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=False): bool, + vol.Optional(CONF_VERIFY_SSL, default=False): bool, } ), description_placeholders=placeholders, diff --git 
a/homeassistant/components/fully_kiosk/coordinator.py b/homeassistant/components/fully_kiosk/coordinator.py index 0cfc15268b4..203251351ae 100644 --- a/homeassistant/components/fully_kiosk/coordinator.py +++ b/homeassistant/components/fully_kiosk/coordinator.py @@ -6,7 +6,7 @@ from fullykiosk import FullyKiosk from fullykiosk.exceptions import FullyKioskError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_SSL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -19,11 +19,14 @@ class FullyKioskDataUpdateCoordinator(DataUpdateCoordinator): def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize.""" + self.use_ssl = entry.data.get(CONF_SSL, False) self.fully = FullyKiosk( async_get_clientsession(hass), entry.data[CONF_HOST], DEFAULT_PORT, entry.data[CONF_PASSWORD], + use_ssl=self.use_ssl, + verify_ssl=entry.data.get(CONF_VERIFY_SSL, False), ) super().__init__( hass, diff --git a/homeassistant/components/fully_kiosk/entity.py b/homeassistant/components/fully_kiosk/entity.py index fcb6f35eb11..5fd9f75a6a0 100644 --- a/homeassistant/components/fully_kiosk/entity.py +++ b/homeassistant/components/fully_kiosk/entity.py @@ -1,7 +1,13 @@ """Base entity for the Fully Kiosk Browser integration.""" from __future__ import annotations +import json + +from yarl import URL + +from homeassistant.components import mqtt from homeassistant.const import ATTR_CONNECTIONS +from homeassistant.core import CALLBACK_TYPE, callback from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -30,13 +36,20 @@ class 
FullyKioskEntity(CoordinatorEntity[FullyKioskDataUpdateCoordinator], Entit def __init__(self, coordinator: FullyKioskDataUpdateCoordinator) -> None: """Initialize the Fully Kiosk Browser entity.""" super().__init__(coordinator=coordinator) + + url = URL.build( + scheme="https" if coordinator.use_ssl else "http", + host=coordinator.data["ip4"], + port=2323, + ) + device_info = DeviceInfo( identifiers={(DOMAIN, coordinator.data["deviceID"])}, name=coordinator.data["deviceName"], manufacturer=coordinator.data["deviceManufacturer"], model=coordinator.data["deviceModel"], sw_version=coordinator.data["appVersionName"], - configuration_url=f"http://{coordinator.data['ip4']}:2323", + configuration_url=str(url), ) if "Mac" in coordinator.data and valid_global_mac_address( coordinator.data["Mac"] @@ -45,3 +58,28 @@ class FullyKioskEntity(CoordinatorEntity[FullyKioskDataUpdateCoordinator], Entit (CONNECTION_NETWORK_MAC, coordinator.data["Mac"]) } self._attr_device_info = device_info + + async def mqtt_subscribe( + self, event: str | None, event_callback: CALLBACK_TYPE + ) -> CALLBACK_TYPE | None: + """Subscribe to MQTT for a given event.""" + data = self.coordinator.data + if ( + event is None + or not mqtt.mqtt_config_entry_enabled(self.hass) + or not data["settings"]["mqttEnabled"] + ): + return None + + @callback + def message_callback(message: mqtt.ReceiveMessage) -> None: + payload = json.loads(message.payload) + event_callback(**payload) + + topic_template = data["settings"]["mqttEventTopic"] + topic = ( + topic_template.replace("$appId", "fully") + .replace("$event", event) + .replace("$deviceId", data["deviceID"]) + ) + return await mqtt.async_subscribe(self.hass, topic, message_callback) diff --git a/homeassistant/components/fully_kiosk/manifest.json b/homeassistant/components/fully_kiosk/manifest.json index dcd36671fce..b5dadf14184 100644 --- a/homeassistant/components/fully_kiosk/manifest.json +++ b/homeassistant/components/fully_kiosk/manifest.json @@ -1,6 +1,7 @@ 
{ "domain": "fully_kiosk", "name": "Fully Kiosk Browser", + "after_dependencies": ["mqtt"], "codeowners": ["@cgarwood"], "config_flow": true, "dhcp": [ diff --git a/homeassistant/components/fully_kiosk/strings.json b/homeassistant/components/fully_kiosk/strings.json index d61e8a7b7a8..c1a1ef1fcf0 100644 --- a/homeassistant/components/fully_kiosk/strings.json +++ b/homeassistant/components/fully_kiosk/strings.json @@ -10,13 +10,18 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of the device running your Fully Kiosk Browser application." } } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "cannot_connect": "Cannot connect. Details: {error_detail}", + "unknown": "Unknown. 
Details: {error_detail}" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" diff --git a/homeassistant/components/fully_kiosk/switch.py b/homeassistant/components/fully_kiosk/switch.py index 500e154abd8..c1d5d4e5c75 100644 --- a/homeassistant/components/fully_kiosk/switch.py +++ b/homeassistant/components/fully_kiosk/switch.py @@ -10,7 +10,7 @@ from fullykiosk import FullyKiosk from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -25,6 +25,8 @@ class FullySwitchEntityDescriptionMixin: on_action: Callable[[FullyKiosk], Any] off_action: Callable[[FullyKiosk], Any] is_on_fn: Callable[[dict[str, Any]], Any] + mqtt_on_event: str | None + mqtt_off_event: str | None @dataclass @@ -41,6 +43,8 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] = ( on_action=lambda fully: fully.startScreensaver(), off_action=lambda fully: fully.stopScreensaver(), is_on_fn=lambda data: data.get("isInScreensaver"), + mqtt_on_event="onScreensaverStart", + mqtt_off_event="onScreensaverStop", ), FullySwitchEntityDescription( key="maintenance", @@ -49,6 +53,8 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] = ( on_action=lambda fully: fully.enableLockedMode(), off_action=lambda fully: fully.disableLockedMode(), is_on_fn=lambda data: data.get("maintenanceMode"), + mqtt_on_event=None, + mqtt_off_event=None, ), FullySwitchEntityDescription( key="kiosk", @@ -57,6 +63,8 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] 
= ( on_action=lambda fully: fully.lockKiosk(), off_action=lambda fully: fully.unlockKiosk(), is_on_fn=lambda data: data.get("kioskLocked"), + mqtt_on_event=None, + mqtt_off_event=None, ), FullySwitchEntityDescription( key="motion-detection", @@ -65,6 +73,8 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] = ( on_action=lambda fully: fully.enableMotionDetection(), off_action=lambda fully: fully.disableMotionDetection(), is_on_fn=lambda data: data["settings"].get("motionDetection"), + mqtt_on_event=None, + mqtt_off_event=None, ), FullySwitchEntityDescription( key="screenOn", @@ -72,6 +82,8 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] = ( on_action=lambda fully: fully.screenOn(), off_action=lambda fully: fully.screenOff(), is_on_fn=lambda data: data.get("screenOn"), + mqtt_on_event="screenOn", + mqtt_off_event="screenOff", ), ) @@ -105,13 +117,27 @@ class FullySwitchEntity(FullyKioskEntity, SwitchEntity): super().__init__(coordinator) self.entity_description = description self._attr_unique_id = f"{coordinator.data['deviceID']}-{description.key}" + self._turned_on_subscription: CALLBACK_TYPE | None = None + self._turned_off_subscription: CALLBACK_TYPE | None = None - @property - def is_on(self) -> bool | None: - """Return true if the entity is on.""" - if (is_on := self.entity_description.is_on_fn(self.coordinator.data)) is None: - return None - return bool(is_on) + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + description = self.entity_description + self._turned_on_subscription = await self.mqtt_subscribe( + description.mqtt_off_event, self._turn_off + ) + self._turned_off_subscription = await self.mqtt_subscribe( + description.mqtt_on_event, self._turn_on + ) + + async def async_will_remove_from_hass(self) -> None: + """Close MQTT subscriptions when removed.""" + await super().async_will_remove_from_hass() + if self._turned_off_subscription is not None: + 
self._turned_off_subscription() + if self._turned_on_subscription is not None: + self._turned_on_subscription() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" @@ -122,3 +148,19 @@ class FullySwitchEntity(FullyKioskEntity, SwitchEntity): """Turn the entity off.""" await self.entity_description.off_action(self.coordinator.fully) await self.coordinator.async_refresh() + + def _turn_off(self, **kwargs: Any) -> None: + """Optimistically turn off.""" + self._attr_is_on = False + self.async_write_ha_state() + + def _turn_on(self, **kwargs: Any) -> None: + """Optimistically turn on.""" + self._attr_is_on = True + self.async_write_ha_state() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_is_on = bool(self.entity_description.is_on_fn(self.coordinator.data)) + self.async_write_ha_state() diff --git a/homeassistant/components/garages_amsterdam/manifest.json b/homeassistant/components/garages_amsterdam/manifest.json index 3f4ffc7fae1..3ce96152337 100644 --- a/homeassistant/components/garages_amsterdam/manifest.json +++ b/homeassistant/components/garages_amsterdam/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/garages_amsterdam", "iot_class": "cloud_polling", - "requirements": ["odp-amsterdam==5.3.1"] + "requirements": ["odp-amsterdam==6.0.0"] } diff --git a/homeassistant/components/gdacs/sensor.py b/homeassistant/components/gdacs/sensor.py index 5d5589c54d6..8a0a0113ced 100644 --- a/homeassistant/components/gdacs/sensor.py +++ b/homeassistant/components/gdacs/sensor.py @@ -7,6 +7,7 @@ import logging from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.dispatcher import 
async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util @@ -44,12 +45,14 @@ class GdacsSensor(SensorEntity): _attr_should_poll = False _attr_icon = DEFAULT_ICON _attr_native_unit_of_measurement = DEFAULT_UNIT_OF_MEASUREMENT + _attr_has_entity_name = True + _attr_name = None def __init__(self, config_entry: ConfigEntry, manager) -> None: """Initialize entity.""" + assert config_entry.unique_id self._config_entry_id = config_entry.entry_id self._attr_unique_id = config_entry.unique_id - self._attr_name = f"GDACS ({config_entry.title})" self._manager = manager self._status = None self._last_update = None @@ -60,6 +63,11 @@ class GdacsSensor(SensorEntity): self._updated = None self._removed = None self._remove_signal_status: Callable[[], None] | None = None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, config_entry.unique_id)}, + entry_type=DeviceEntryType.SERVICE, + manufacturer="GDACS", + ) async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 621566a70f5..9ffd873efd6 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -33,6 +33,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv, template as template_helper +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -172,6 +173,11 @@ class GenericCamera(Camera): self._last_url = None self._last_image = None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, 
identifier)}, + manufacturer="Generic", + ) + @property def use_stream_for_stills(self) -> bool: """Whether or not to use stream to generate stills.""" diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index 18ea52fc15f..2e33bc6741e 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], "quality_scale": "platinum", - "requirements": ["gios==3.2.1"] + "requirements": ["gios==3.2.2"] } diff --git a/homeassistant/components/glances/strings.json b/homeassistant/components/glances/strings.json index fdd0c44b31b..1bab098d65f 100644 --- a/homeassistant/components/glances/strings.json +++ b/homeassistant/components/glances/strings.json @@ -10,6 +10,9 @@ "version": "Glances API Version (2 or 3)", "ssl": "[%key:common::config_flow::data::ssl%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of the system running your Glances system monitor." } }, "reauth_confirm": { diff --git a/homeassistant/components/goalzero/strings.json b/homeassistant/components/goalzero/strings.json index d94f5219607..c6d85bd4c10 100644 --- a/homeassistant/components/goalzero/strings.json +++ b/homeassistant/components/goalzero/strings.json @@ -6,6 +6,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]" + }, + "data_description": { + "host": "The hostname or IP address of your Goal Zero Yeti." 
} }, "confirm_discovery": { diff --git a/homeassistant/components/goodwe/sensor.py b/homeassistant/components/goodwe/sensor.py index 332280bac5a..0065d70dda9 100644 --- a/homeassistant/components/goodwe/sensor.py +++ b/homeassistant/components/goodwe/sensor.py @@ -79,12 +79,12 @@ _ICONS: dict[SensorKind, str] = { class GoodweSensorEntityDescription(SensorEntityDescription): """Class describing Goodwe sensor entities.""" - value: Callable[ - [GoodweUpdateCoordinator, str], Any - ] = lambda coordinator, sensor: coordinator.sensor_value(sensor) - available: Callable[ - [GoodweUpdateCoordinator], bool - ] = lambda coordinator: coordinator.last_update_success + value: Callable[[GoodweUpdateCoordinator, str], Any] = ( + lambda coordinator, sensor: coordinator.sensor_value(sensor) + ) + available: Callable[[GoodweUpdateCoordinator], bool] = ( + lambda coordinator: coordinator.last_update_success + ) _DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = { diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index fc9107bb8d2..27e462a380e 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/calendar.google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.0.1", "oauth2client==4.1.3"] + "requirements": ["gcal-sync==6.0.3", "oauth2client==4.1.3"] } diff --git a/homeassistant/components/google_assistant/helpers.py b/homeassistant/components/google_assistant/helpers.py index b2cda5522ee..c89925664e0 100644 --- a/homeassistant/components/google_assistant/helpers.py +++ b/homeassistant/components/google_assistant/helpers.py @@ -15,7 +15,7 @@ from aiohttp.web import json_response from awesomeversion import AwesomeVersion from yarl import URL -from homeassistant.components import webhook +from homeassistant.components import matter, webhook from 
homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_SUPPORTED_FEATURES, @@ -59,7 +59,11 @@ LOCAL_SDK_MIN_VERSION = AwesomeVersion("2.1.5") @callback def _get_registry_entries( hass: HomeAssistant, entity_id: str -) -> tuple[er.RegistryEntry | None, dr.DeviceEntry | None, ar.AreaEntry | None,]: +) -> tuple[ + er.RegistryEntry | None, + dr.DeviceEntry | None, + ar.AreaEntry | None, +]: """Get registry entries.""" ent_reg = er.async_get(hass) dev_reg = dr.async_get(hass) @@ -678,10 +682,22 @@ class GoogleEntity: elif area_entry and area_entry.name: device["roomHint"] = area_entry.name - # Add deviceInfo if not device_entry: return device + # Add Matter info + if ( + "matter" in self.hass.config.components + and any(x for x in device_entry.identifiers if x[0] == "matter") + and ( + matter_info := matter.get_matter_device_info(self.hass, device_entry.id) + ) + ): + device["matterUniqueId"] = matter_info["unique_id"] + device["matterOriginalVendorId"] = matter_info["vendor_id"] + device["matterOriginalProductId"] = matter_info["product_id"] + + # Add deviceInfo device_info = {} if device_entry.manufacturer: diff --git a/homeassistant/components/google_assistant/manifest.json b/homeassistant/components/google_assistant/manifest.json index 3c7ac043441..e36f6a1ca87 100644 --- a/homeassistant/components/google_assistant/manifest.json +++ b/homeassistant/components/google_assistant/manifest.json @@ -1,7 +1,7 @@ { "domain": "google_assistant", "name": "Google Assistant", - "after_dependencies": ["camera"], + "after_dependencies": ["camera", "matter"], "codeowners": ["@home-assistant/cloud"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/google_assistant", diff --git a/homeassistant/components/google_generative_ai_conversation/__init__.py b/homeassistant/components/google_generative_ai_conversation/__init__.py index 1154c7132d2..c507e0c046d 100644 --- a/homeassistant/components/google_generative_ai_conversation/__init__.py +++ 
b/homeassistant/components/google_generative_ai_conversation/__init__.py @@ -88,7 +88,7 @@ class GoogleGenerativeAIAgent(conversation.AbstractConversationAgent): conversation_id = user_input.conversation_id messages = self.history[conversation_id] else: - conversation_id = ulid.ulid() + conversation_id = ulid.ulid_now() messages = [] try: diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 01ceb0349e6..130c0d2cc01 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -1,8 +1,8 @@ """Google Tasks todo platform.""" from __future__ import annotations -from datetime import timedelta -from typing import cast +from datetime import date, datetime, timedelta +from typing import Any, cast from homeassistant.components.todo import ( TodoItem, @@ -14,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import dt as dt_util from .api import AsyncConfigEntryAuth from .const import DOMAIN @@ -35,9 +36,31 @@ def _convert_todo_item(item: TodoItem) -> dict[str, str]: result["title"] = item.summary if item.status is not None: result["status"] = TODO_STATUS_MAP_INV[item.status] + if (due := item.due) is not None: + # due API field is a timestamp string, but with only date resolution + result["due"] = dt_util.start_of_local_day(due).isoformat() + if (description := item.description) is not None: + result["notes"] = description return result +def _convert_api_item(item: dict[str, str]) -> TodoItem: + """Convert tasks API items into a TodoItem.""" + due: date | None = None + if (due_str := item.get("due")) is not None: + due = datetime.fromisoformat(due_str).date() + return TodoItem( + summary=item["title"], + uid=item["id"], + 
status=TODO_STATUS_MAP.get( + item.get("status", ""), + TodoItemStatus.NEEDS_ACTION, + ), + due=due, + description=item.get("notes"), + ) + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -68,6 +91,8 @@ class GoogleTaskTodoListEntity( TodoListEntityFeature.CREATE_TODO_ITEM | TodoListEntityFeature.UPDATE_TODO_ITEM | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.SET_DUE_DATE_ON_ITEM + | TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM ) def __init__( @@ -88,16 +113,7 @@ class GoogleTaskTodoListEntity( """Get the current set of To-do items.""" if self.coordinator.data is None: return None - return [ - TodoItem( - summary=item["title"], - uid=item["id"], - status=TODO_STATUS_MAP.get( - item.get("status"), TodoItemStatus.NEEDS_ACTION # type: ignore[arg-type] - ), - ) - for item in self.coordinator.data - ] + return [_convert_api_item(item) for item in _order_tasks(self.coordinator.data)] async def async_create_todo_item(self, item: TodoItem) -> None: """Add an item to the To-do list.""" @@ -121,3 +137,16 @@ class GoogleTaskTodoListEntity( """Delete To-do items.""" await self.coordinator.api.delete(self._task_list_id, uids) await self.coordinator.async_refresh() + + +def _order_tasks(tasks: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Order the task items response. + + All tasks have an order amongst their siblings based on position. + + Home Assistant To-do items do not support the Google Task parent/sibling + relationships and the desired behavior is for them to be filtered out.
+ """ + parents = [task for task in tasks if task.get("parent") is None] + parents.sort(key=lambda task: task["position"]) + return parents diff --git a/homeassistant/components/gree/__init__.py b/homeassistant/components/gree/__init__.py index ff3438ed53f..13e93d780b2 100644 --- a/homeassistant/components/gree/__init__.py +++ b/homeassistant/components/gree/__init__.py @@ -11,7 +11,6 @@ from homeassistant.helpers.event import async_track_time_interval from .bridge import DiscoveryService from .const import ( COORDINATORS, - DATA_DISCOVERY_INTERVAL, DATA_DISCOVERY_SERVICE, DISCOVERY_SCAN_INTERVAL, DISPATCHERS, @@ -29,7 +28,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: gree_discovery = DiscoveryService(hass) hass.data[DATA_DISCOVERY_SERVICE] = gree_discovery - hass.data[DOMAIN].setdefault(DISPATCHERS, []) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) async def _async_scan_update(_=None): @@ -39,8 +37,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.debug("Scanning network for Gree devices") await _async_scan_update() - hass.data[DOMAIN][DATA_DISCOVERY_INTERVAL] = async_track_time_interval( - hass, _async_scan_update, timedelta(seconds=DISCOVERY_SCAN_INTERVAL) + entry.async_on_unload( + async_track_time_interval( + hass, _async_scan_update, timedelta(seconds=DISCOVERY_SCAN_INTERVAL) + ) ) return True @@ -48,13 +48,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - if hass.data[DOMAIN].get(DISPATCHERS) is not None: - for cleanup in hass.data[DOMAIN][DISPATCHERS]: - cleanup() - - if hass.data[DOMAIN].get(DATA_DISCOVERY_INTERVAL) is not None: - hass.data[DOMAIN].pop(DATA_DISCOVERY_INTERVAL)() - if hass.data.get(DATA_DISCOVERY_SERVICE) is not None: hass.data.pop(DATA_DISCOVERY_SERVICE) diff --git 
a/homeassistant/components/gree/climate.py b/homeassistant/components/gree/climate.py index b14b9cfaba4..ba162173724 100644 --- a/homeassistant/components/gree/climate.py +++ b/homeassistant/components/gree/climate.py @@ -47,7 +47,6 @@ from .bridge import DeviceDataUpdateCoordinator from .const import ( COORDINATORS, DISPATCH_DEVICE_DISCOVERED, - DISPATCHERS, DOMAIN, FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW, @@ -88,7 +87,7 @@ SWING_MODES = [SWING_OFF, SWING_VERTICAL, SWING_HORIZONTAL, SWING_BOTH] async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Gree HVAC device from a config entry.""" @@ -101,7 +100,7 @@ async def async_setup_entry( for coordinator in hass.data[DOMAIN][COORDINATORS]: init_device(coordinator) - hass.data[DOMAIN][DISPATCHERS].append( + entry.async_on_unload( async_dispatcher_connect(hass, DISPATCH_DEVICE_DISCOVERED, init_device) ) diff --git a/homeassistant/components/gree/const.py b/homeassistant/components/gree/const.py index b4df7a1acde..46479210921 100644 --- a/homeassistant/components/gree/const.py +++ b/homeassistant/components/gree/const.py @@ -3,7 +3,6 @@ COORDINATORS = "coordinators" DATA_DISCOVERY_SERVICE = "gree_discovery" -DATA_DISCOVERY_INTERVAL = "gree_discovery_interval" DISCOVERY_SCAN_INTERVAL = 300 DISCOVERY_TIMEOUT = 8 diff --git a/homeassistant/components/gree/switch.py b/homeassistant/components/gree/switch.py index 68c11ad6e1f..7916df18abc 100644 --- a/homeassistant/components/gree/switch.py +++ b/homeassistant/components/gree/switch.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import COORDINATORS, DISPATCH_DEVICE_DISCOVERED, DISPATCHERS, DOMAIN +from .const import COORDINATORS, DISPATCH_DEVICE_DISCOVERED, DOMAIN from .entity import 
GreeEntity @@ -102,7 +102,7 @@ GREE_SWITCHES: tuple[GreeSwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Gree HVAC device from a config entry.""" @@ -119,7 +119,7 @@ async def async_setup_entry( for coordinator in hass.data[DOMAIN][COORDINATORS]: init_device(coordinator) - hass.data[DOMAIN][DISPATCHERS].append( + entry.async_on_unload( async_dispatcher_connect(hass, DISPATCH_DEVICE_DISCOVERED, init_device) ) diff --git a/homeassistant/components/growatt_server/manifest.json b/homeassistant/components/growatt_server/manifest.json index a21c811af47..d872474f1da 100644 --- a/homeassistant/components/growatt_server/manifest.json +++ b/homeassistant/components/growatt_server/manifest.json @@ -1,7 +1,7 @@ { "domain": "growatt_server", "name": "Growatt", - "codeowners": ["@muppet3000"], + "codeowners": [], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/growatt_server", "iot_class": "cloud_polling", diff --git a/homeassistant/components/guardian/__init__.py b/homeassistant/components/guardian/__init__.py index d7a9fe4e836..bd2cb8c96de 100644 --- a/homeassistant/components/guardian/__init__.py +++ b/homeassistant/components/guardian/__init__.py @@ -2,9 +2,9 @@ from __future__ import annotations import asyncio -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass -from typing import cast +from typing import Any, cast from aioguardian import Client from aioguardian.errors import GuardianError @@ -170,7 +170,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @callback - def call_with_data(func: Callable) -> Callable: + def call_with_data( + func: Callable[[ServiceCall, GuardianData], Coroutine[Any, Any, 
None]] + ) -> Callable[[ServiceCall], Coroutine[Any, Any, None]]: """Hydrate a service call with the appropriate GuardianData object.""" async def wrapper(call: ServiceCall) -> None: diff --git a/homeassistant/components/harmony/strings.json b/homeassistant/components/harmony/strings.json index 9ae22090d7f..f6862ca3c83 100644 --- a/homeassistant/components/harmony/strings.json +++ b/homeassistant/components/harmony/strings.json @@ -7,6 +7,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "name": "Hub Name" + }, + "data_description": { + "host": "The hostname or IP address of your Logitech Harmony Hub." } }, "link": { @@ -42,6 +45,16 @@ } } }, + "issues": { + "deprecated_switches": { + "title": "The Logitech Harmony switch platform is being removed", + "description": "Using the switch platform to change the current activity is now deprecated and will be removed in a future version of Home Assistant.\n\nPlease adjust any automations or scripts that use switch entities to instead use the select entity." + }, + "deprecated_switches_entity": { + "title": "Deprecated Harmony entity detected in {info}", + "description": "Your Harmony entity `{entity}` is being used in `{info}`. A select entity is available and should be used going forward.\n\nPlease adjust `{info}` to fix this issue." 
+ } + }, "services": { "sync": { "name": "Sync", diff --git a/homeassistant/components/harmony/switch.py b/homeassistant/components/harmony/switch.py index acd04596bd5..6b833df9720 100644 --- a/homeassistant/components/harmony/switch.py +++ b/homeassistant/components/harmony/switch.py @@ -1,12 +1,15 @@ """Support for Harmony Hub activities.""" import logging -from typing import Any +from typing import Any, cast -from homeassistant.components.switch import SwitchEntity +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, HARMONY_DATA from .data import HarmonyData @@ -20,6 +23,15 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up harmony activity switches.""" + async_create_issue( + hass, + DOMAIN, + "deprecated_switches", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_switches", + ) data = hass.data[DOMAIN][entry.entry_id][HARMONY_DATA] activities = data.activities @@ -72,6 +84,22 @@ class HarmonyActivitySwitch(HarmonyEntity, SwitchEntity): ) ) ) + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + for item in entity_automations + entity_scripts: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_switches_{self.entity_id}_{item}", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + 
translation_key="deprecated_switches_entity", + translation_placeholders={ + "entity": f"{SWITCH_DOMAIN}.{cast(str, self.name).lower().replace(' ', '_')}", + "info": item, + }, + ) @callback def _async_activity_update(self, activity_info: tuple): diff --git a/homeassistant/components/hassio/http.py b/homeassistant/components/hassio/http.py index 419d80484cf..9d72d5842fd 100644 --- a/homeassistant/components/hassio/http.py +++ b/homeassistant/components/hassio/http.py @@ -6,6 +6,7 @@ from http import HTTPStatus import logging import os import re +from typing import TYPE_CHECKING from urllib.parse import quote, unquote import aiohttp @@ -156,6 +157,9 @@ class HassIOView(HomeAssistantView): # _stored_content_type is only computed once `content_type` is accessed if path == "backups/new/upload": # We need to reuse the full content type that includes the boundary + if TYPE_CHECKING: + # pylint: disable-next=protected-access + assert isinstance(request._stored_content_type, str) # pylint: disable-next=protected-access headers[CONTENT_TYPE] = request._stored_content_type diff --git a/homeassistant/components/hassio/ingress.py b/homeassistant/components/hassio/ingress.py index b29f80ff2b3..751e9005809 100644 --- a/homeassistant/components/hassio/ingress.py +++ b/homeassistant/components/hassio/ingress.py @@ -17,7 +17,6 @@ from yarl import URL from homeassistant.components.http import HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.aiohttp_compat import enable_compression from homeassistant.helpers.typing import UNDEFINED from .const import X_HASS_SOURCE, X_INGRESS_PATH @@ -172,7 +171,7 @@ class HassIOIngress(HomeAssistantView): content_length = result.headers.get(hdrs.CONTENT_LENGTH, UNDEFINED) # Avoid parsing content_type in simple cases for better performance if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE): - content_type = 
(maybe_content_type.partition(";"))[0].strip() + content_type: str = (maybe_content_type.partition(";"))[0].strip() else: content_type = result.content_type # Simple request @@ -188,11 +187,12 @@ class HassIOIngress(HomeAssistantView): status=result.status, content_type=content_type, body=body, + zlib_executor_size=32768, ) if content_length_int > MIN_COMPRESSED_SIZE and should_compress( content_type or simple_response.content_type ): - enable_compression(simple_response) + simple_response.enable_compression() await simple_response.prepare(request) return simple_response diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 8337405641c..fcfe23dda6e 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -1,6 +1,7 @@ """Repairs implementation for supervisor integration.""" +from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine from types import MethodType from typing import Any @@ -116,7 +117,12 @@ class SupervisorIssueRepairFlow(RepairsFlow): return self.async_create_entry(data={}) @staticmethod - def _async_step(suggestion: Suggestion) -> Callable: + def _async_step( + suggestion: Suggestion, + ) -> Callable[ + [SupervisorIssueRepairFlow, dict[str, str] | None], + Coroutine[Any, Any, FlowResult], + ]: """Generate a step handler for a suggestion.""" async def _async_step( diff --git a/homeassistant/components/hdmi_cec/__init__.py b/homeassistant/components/hdmi_cec/__init__.py index 19621e28d03..54ea2f3e5bd 100644 --- a/homeassistant/components/hdmi_cec/__init__.py +++ b/homeassistant/components/hdmi_cec/__init__.py @@ -195,9 +195,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901 loop = ( # Create own thread if more than 1 CPU - hass.loop - if multiprocessing.cpu_count() < 2 - else None + hass.loop if multiprocessing.cpu_count() < 2 else None ) host = 
base_config[DOMAIN].get(CONF_HOST) display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME) diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index 7bd362cf3d7..df18fc7834a 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -6,6 +6,9 @@ "description": "Please enter the host name or IP address of a Heos device (preferably one connected via wire to the network).", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your HEOS device." } } }, diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index f5b97a7fb13..9eab92dce5c 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -21,7 +21,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after CONF_ORDER = "use_include_order" @@ -106,7 +106,8 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - not include_start_time_state + (end_time and not has_recorder_run_after(hass, end_time)) + or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( hass, entity_ids, start_time, no_attributes diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index 523b1fafb7f..7e28e69e5f9 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -4,6 +4,8 @@ from __future__ import annotations from collections.abc import Iterable from datetime import datetime as dt +from homeassistant.components.recorder import get_instance +from 
homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -21,3 +23,10 @@ def entities_may_have_state_changes_after( return True return False + + +def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has any runs after a specific time.""" + return run_time >= process_timestamp( + get_instance(hass).recorder_runs_manager.first.start + ) diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index 24ec07b6a87..4be63f29c02 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.helpers.typing import EventType import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after _LOGGER = logging.getLogger(__name__) @@ -142,7 +142,8 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - not include_start_time_state + (end_time and not has_recorder_run_after(hass, end_time)) + or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( hass, entity_ids, start_time, no_attributes diff --git a/homeassistant/components/hlk_sw16/strings.json b/homeassistant/components/hlk_sw16/strings.json index d6e3212b4ea..ba74547e355 100644 --- a/homeassistant/components/hlk_sw16/strings.json +++ b/homeassistant/components/hlk_sw16/strings.json @@ -6,6 +6,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your Hi-Link HLK-SW-16 device." 
} } }, diff --git a/homeassistant/components/homeassistant/scene.py b/homeassistant/components/homeassistant/scene.py index 4b694d2b97a..3308083f22f 100644 --- a/homeassistant/components/homeassistant/scene.py +++ b/homeassistant/components/homeassistant/scene.py @@ -29,14 +29,17 @@ from homeassistant.core import ( State, callback, ) -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_per_platform, config_validation as cv, entity_platform, ) from homeassistant.helpers.entity_platform import AddEntitiesCallback, EntityPlatform -from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.service import ( + async_extract_entity_ids, + async_register_admin_service, +) from homeassistant.helpers.state import async_reproduce_state from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.loader import async_get_integration @@ -125,6 +128,7 @@ CREATE_SCENE_SCHEMA = vol.All( SERVICE_APPLY = "apply" SERVICE_CREATE = "create" +SERVICE_DELETE = "delete" _LOGGER = logging.getLogger(__name__) @@ -194,7 +198,9 @@ async def async_setup_platform( integration = await async_get_integration(hass, SCENE_DOMAIN) - conf = await conf_util.async_process_component_config(hass, config, integration) + conf = await conf_util.async_process_component_and_handle_errors( + hass, config, integration + ) if not (conf and platform): return @@ -271,6 +277,41 @@ async def async_setup_platform( SCENE_DOMAIN, SERVICE_CREATE, create_service, CREATE_SCENE_SCHEMA ) + async def delete_service(call: ServiceCall) -> None: + """Delete a dynamically created scene.""" + entity_ids = await async_extract_entity_ids(hass, call) + + for entity_id in entity_ids: + scene = platform.entities.get(entity_id) + if scene is None: + raise ServiceValidationError( + f"{entity_id} is not a valid scene entity_id", + 
translation_domain=SCENE_DOMAIN, + translation_key="entity_not_scene", + translation_placeholders={ + "entity_id": entity_id, + }, + ) + assert isinstance(scene, HomeAssistantScene) + if not scene.from_service: + raise ServiceValidationError( + f"The scene {entity_id} is not created with service `scene.create`", + translation_domain=SCENE_DOMAIN, + translation_key="entity_not_dynamically_created", + translation_placeholders={ + "entity_id": entity_id, + }, + ) + + await platform.async_remove_entity(entity_id) + + hass.services.async_register( + SCENE_DOMAIN, + SERVICE_DELETE, + delete_service, + cv.make_entity_service_schema({}), + ) + def _process_scenes_config( hass: HomeAssistant, async_add_entities: AddEntitiesCallback, config: dict[str, Any] diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index f14d9f8148c..6981bdfe685 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -138,6 +138,36 @@ } }, "exceptions": { + "component_import_err": { + "message": "Unable to import {domain}: {error}" + }, + "config_platform_import_err": { + "message": "Error importing config platform {domain}: {error}" + }, + "config_validation_err": { + "message": "Invalid config for integration {domain} at {config_file}, line {line}: {error}. Check the logs for more information." + }, + "config_validator_unknown_err": { + "message": "Unknown error calling {domain} config validator. Check the logs for more information." + }, + "config_schema_unknown_err": { + "message": "Unknown error calling {domain} CONFIG_SCHEMA. Check the logs for more information." + }, + "integration_config_error": { + "message": "Failed to process config for integration {domain} due to multiple ({errors}) errors. Check the logs for more information." + }, + "platform_component_load_err": { + "message": "Platform error: {domain} - {error}. Check the logs for more information." 
+ }, + "platform_component_load_exc": { + "message": "Platform error: {domain} - {error}. Check the logs for more information." + }, + "platform_config_validation_err": { + "message": "Invalid config for {domain} from integration {p_name} at file {config_file}, line {line}: {error}. Check the logs for more information." + }, + "platform_schema_validator_err": { + "message": "Unknown error when validating config for {domain} from integration {p_name}" + }, "service_not_found": { "message": "Service {domain}.{service} not found." } diff --git a/homeassistant/components/homekit/type_fans.py b/homeassistant/components/homekit/type_fans.py index 9b27653e4cf..d371998aaf8 100644 --- a/homeassistant/components/homekit/type_fans.py +++ b/homeassistant/components/homekit/type_fans.py @@ -124,12 +124,15 @@ class Fan(HomeAccessory): ), ) + setter_callback = ( + lambda value, preset_mode=preset_mode: self.set_preset_mode( + value, preset_mode + ) + ) self.preset_mode_chars[preset_mode] = preset_serv.configure_char( CHAR_ON, value=False, - setter_callback=lambda value, preset_mode=preset_mode: self.set_preset_mode( - value, preset_mode - ), + setter_callback=setter_callback, ) if CHAR_SWING_MODE in self.chars: diff --git a/homeassistant/components/homewizard/helpers.py b/homeassistant/components/homewizard/helpers.py index 3f7fc064931..4f12a4f9726 100644 --- a/homeassistant/components/homewizard/helpers.py +++ b/homeassistant/components/homewizard/helpers.py @@ -8,6 +8,7 @@ from homewizard_energy.errors import DisabledError, RequestError from homeassistant.exceptions import HomeAssistantError +from .const import DOMAIN from .entity import HomeWizardEntity _HomeWizardEntityT = TypeVar("_HomeWizardEntityT", bound=HomeWizardEntity) @@ -30,11 +31,19 @@ def homewizard_exception_handler( try: await func(self, *args, **kwargs) except RequestError as ex: - raise HomeAssistantError from ex + raise HomeAssistantError( + "An error occurred while communicating with HomeWizard device", + 
translation_domain=DOMAIN, + translation_key="communication_error", + ) from ex except DisabledError as ex: await self.hass.config_entries.async_reload( self.coordinator.config_entry.entry_id ) - raise HomeAssistantError from ex + raise HomeAssistantError( + "The local API of the HomeWizard device is disabled", + translation_domain=DOMAIN, + translation_key="api_disabled", + ) from ex return handler diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index b987fd6f208..949dda2a8aa 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_polling", "loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==3.1.0"], + "requirements": ["python-homewizard-energy==4.1.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 84aa58f2d27..78cee9ee6fe 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -35,21 +35,13 @@ from .entity import HomeWizardEntity PARALLEL_UPDATES = 1 -@dataclass -class HomeWizardEntityDescriptionMixin: - """Mixin values for HomeWizard entities.""" - - has_fn: Callable[[Data], bool] - value_fn: Callable[[Data], StateType] - - -@dataclass -class HomeWizardSensorEntityDescription( - SensorEntityDescription, HomeWizardEntityDescriptionMixin -): +@dataclass(kw_only=True) +class HomeWizardSensorEntityDescription(SensorEntityDescription): """Class describing HomeWizard sensor entities.""" enabled_fn: Callable[[Data], bool] = lambda data: True + has_fn: Callable[[Data], bool] + value_fn: Callable[[Data], StateType] SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( @@ -114,7 +106,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( 
device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_import_kwh is not None, - value_fn=lambda data: data.total_energy_import_kwh or None, + value_fn=lambda data: data.total_energy_import_kwh, ), HomeWizardSensorEntityDescription( key="total_power_import_t1_kwh", @@ -122,8 +114,12 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, - has_fn=lambda data: data.total_energy_import_t1_kwh is not None, - value_fn=lambda data: data.total_energy_import_t1_kwh or None, + has_fn=lambda data: ( + # SKT/SDM230/630 provides both total and tariff 1: duplicate. + data.total_energy_import_t1_kwh is not None + and data.total_energy_export_t2_kwh is not None + ), + value_fn=lambda data: data.total_energy_import_t1_kwh, ), HomeWizardSensorEntityDescription( key="total_power_import_t2_kwh", @@ -132,7 +128,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_import_t2_kwh is not None, - value_fn=lambda data: data.total_energy_import_t2_kwh or None, + value_fn=lambda data: data.total_energy_import_t2_kwh, ), HomeWizardSensorEntityDescription( key="total_power_import_t3_kwh", @@ -141,7 +137,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_import_t3_kwh is not None, - value_fn=lambda data: data.total_energy_import_t3_kwh or None, + value_fn=lambda data: data.total_energy_import_t3_kwh, ), HomeWizardSensorEntityDescription( key="total_power_import_t4_kwh", @@ -150,7 +146,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.ENERGY, 
state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_import_t4_kwh is not None, - value_fn=lambda data: data.total_energy_import_t4_kwh or None, + value_fn=lambda data: data.total_energy_import_t4_kwh, ), HomeWizardSensorEntityDescription( key="total_power_export_kwh", @@ -160,7 +156,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_export_kwh is not None, enabled_fn=lambda data: data.total_energy_export_kwh != 0, - value_fn=lambda data: data.total_energy_export_kwh or None, + value_fn=lambda data: data.total_energy_export_kwh, ), HomeWizardSensorEntityDescription( key="total_power_export_t1_kwh", @@ -168,9 +164,13 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, - has_fn=lambda data: data.total_energy_export_t1_kwh is not None, + has_fn=lambda data: ( + # SKT/SDM230/630 provides both total and tariff 1: duplicate. 
+ data.total_energy_export_t1_kwh is not None + and data.total_energy_export_t2_kwh is not None + ), enabled_fn=lambda data: data.total_energy_export_t1_kwh != 0, - value_fn=lambda data: data.total_energy_export_t1_kwh or None, + value_fn=lambda data: data.total_energy_export_t1_kwh, ), HomeWizardSensorEntityDescription( key="total_power_export_t2_kwh", @@ -180,7 +180,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_export_t2_kwh is not None, enabled_fn=lambda data: data.total_energy_export_t2_kwh != 0, - value_fn=lambda data: data.total_energy_export_t2_kwh or None, + value_fn=lambda data: data.total_energy_export_t2_kwh, ), HomeWizardSensorEntityDescription( key="total_power_export_t3_kwh", @@ -190,7 +190,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_export_t3_kwh is not None, enabled_fn=lambda data: data.total_energy_export_t3_kwh != 0, - value_fn=lambda data: data.total_energy_export_t3_kwh or None, + value_fn=lambda data: data.total_energy_export_t3_kwh, ), HomeWizardSensorEntityDescription( key="total_power_export_t4_kwh", @@ -200,7 +200,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_energy_export_t4_kwh is not None, enabled_fn=lambda data: data.total_energy_export_t4_kwh != 0, - value_fn=lambda data: data.total_energy_export_t4_kwh or None, + value_fn=lambda data: data.total_energy_export_t4_kwh, ), HomeWizardSensorEntityDescription( key="active_power_w", @@ -399,7 +399,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.GAS, state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_gas_m3 is not None, - value_fn=lambda data: data.total_gas_m3 or None, + value_fn=lambda data: 
data.total_gas_m3, ), HomeWizardSensorEntityDescription( key="gas_unique_id", @@ -426,7 +426,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( device_class=SensorDeviceClass.WATER, state_class=SensorStateClass.TOTAL_INCREASING, has_fn=lambda data: data.total_liter_m3 is not None, - value_fn=lambda data: data.total_liter_m3 or None, + value_fn=lambda data: data.total_liter_m3, ), ) diff --git a/homeassistant/components/homewizard/strings.json b/homeassistant/components/homewizard/strings.json index 3bc55b3c848..acdb321d6ff 100644 --- a/homeassistant/components/homewizard/strings.json +++ b/homeassistant/components/homewizard/strings.json @@ -167,5 +167,13 @@ "name": "Cloud connection" } } + }, + "exceptions": { + "api_disabled": { + "message": "The local API of the HomeWizard device is disabled" + }, + "communication_error": { + "message": "An error occurred while communicating with HomeWizard device" + } } } diff --git a/homeassistant/components/homewizard/switch.py b/homeassistant/components/homewizard/switch.py index ed59963aa41..3f854aad320 100644 --- a/homeassistant/components/homewizard/switch.py +++ b/homeassistant/components/homewizard/switch.py @@ -23,23 +23,15 @@ from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler -@dataclass -class HomeWizardEntityDescriptionMixin: - """Mixin values for HomeWizard entities.""" - - create_fn: Callable[[HWEnergyDeviceUpdateCoordinator], bool] - available_fn: Callable[[DeviceResponseEntry], bool] - is_on_fn: Callable[[DeviceResponseEntry], bool | None] - set_fn: Callable[[HomeWizardEnergy, bool], Awaitable[Any]] - - -@dataclass -class HomeWizardSwitchEntityDescription( - SwitchEntityDescription, HomeWizardEntityDescriptionMixin -): +@dataclass(kw_only=True) +class HomeWizardSwitchEntityDescription(SwitchEntityDescription): """Class describing HomeWizard switch entities.""" + available_fn: Callable[[DeviceResponseEntry], bool] + create_fn: 
Callable[[HWEnergyDeviceUpdateCoordinator], bool] icon_off: str | None = None + is_on_fn: Callable[[DeviceResponseEntry], bool | None] + set_fn: Callable[[HomeWizardEnergy, bool], Awaitable[Any]] SWITCHES = [ diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index e9af4b2fd95..dfac69b3aed 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -6,7 +6,13 @@ import datetime from typing import Any from aiohttp import ClientConnectionError -from aiosomecomfort import SomeComfortError, UnauthorizedError, UnexpectedResponse +from aiosomecomfort import ( + AuthError, + ConnectionError as AscConnectionError, + SomeComfortError, + UnauthorizedError, + UnexpectedResponse, +) from aiosomecomfort.device import Device as SomeComfortDevice from homeassistant.components.climate import ( @@ -492,31 +498,38 @@ class HoneywellUSThermostat(ClimateEntity): async def async_update(self) -> None: """Get the latest state from the service.""" - try: - await self._device.refresh() - self._attr_available = True - self._retry = 0 - except UnauthorizedError: + async def _login() -> None: try: await self._data.client.login() await self._device.refresh() - self._attr_available = True - self._retry = 0 except ( - SomeComfortError, + AuthError, ClientConnectionError, asyncio.TimeoutError, ): self._retry += 1 - if self._retry > RETRY: - self._attr_available = False + self._attr_available = self._retry <= RETRY + return - except (ClientConnectionError, asyncio.TimeoutError): + self._attr_available = True + self._retry = 0 + + try: + await self._device.refresh() + + except UnauthorizedError: + await _login() + return + + except (AscConnectionError, ClientConnectionError, asyncio.TimeoutError): self._retry += 1 - if self._retry > RETRY: - self._attr_available = False + self._attr_available = self._retry <= RETRY + return except UnexpectedResponse: - pass + return + + self._attr_available 
= True + self._retry = 0 diff --git a/homeassistant/components/honeywell/manifest.json b/homeassistant/components/honeywell/manifest.json index a53eaaab8ce..c4ddba49357 100644 --- a/homeassistant/components/honeywell/manifest.json +++ b/homeassistant/components/honeywell/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/honeywell", "iot_class": "cloud_polling", "loggers": ["somecomfort"], - "requirements": ["AIOSomecomfort==0.0.17"] + "requirements": ["AIOSomecomfort==0.0.24"] } diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index 5a1d182e80c..449f00fb335 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -16,7 +16,6 @@ from aiohttp.http_parser import RawRequestMessage from aiohttp.streams import StreamReader from aiohttp.typedefs import JSONDecoder, StrOrURL from aiohttp.web_exceptions import HTTPMovedPermanently, HTTPRedirection -from aiohttp.web_log import AccessLogger from aiohttp.web_protocol import RequestHandler from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher from aiohttp_zlib_ng import enable_zlib_ng @@ -238,25 +237,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -class HomeAssistantAccessLogger(AccessLogger): - """Access logger for Home Assistant that does not log when disabled.""" - - def log( - self, request: web.BaseRequest, response: web.StreamResponse, time: float - ) -> None: - """Log the request. - - The default implementation logs the request to the logger - with the INFO level and than throws it away if the logger - is not enabled for the INFO level. This implementation - does not log the request if the logger is not enabled for - the INFO level. 
- """ - if not self.logger.isEnabledFor(logging.INFO): - return - super().log(request, response, time) - - class HomeAssistantRequest(web.Request): """Home Assistant request object.""" @@ -540,9 +520,7 @@ class HomeAssistantHTTP: # pylint: disable-next=protected-access self.app._router.freeze = lambda: None # type: ignore[method-assign] - self.runner = web.AppRunner( - self.app, access_log_class=HomeAssistantAccessLogger - ) + self.runner = web.AppRunner(self.app, handler_cancellation=True) await self.runner.setup() self.site = HomeAssistantTCPSite( diff --git a/homeassistant/components/http/auth.py b/homeassistant/components/http/auth.py index fc7b3c03abe..618bab91f7f 100644 --- a/homeassistant/components/http/auth.py +++ b/homeassistant/components/http/auth.py @@ -21,6 +21,7 @@ from homeassistant.auth.models import User from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.json import json_bytes +from homeassistant.helpers.network import is_cloud_connection from homeassistant.helpers.storage import Store from homeassistant.util.network import is_local @@ -98,12 +99,8 @@ def async_user_not_allowed_do_auth( if not request: return "No request available to validate local access" - if "cloud" in hass.config.components: - # pylint: disable-next=import-outside-toplevel - from hass_nabucasa import remote - - if remote.is_cloud_request.get(): - return "User is local only" + if is_cloud_connection(hass): + return "User is local only" try: remote_address = ip_address(request.remote) # type: ignore[arg-type] diff --git a/homeassistant/components/http/ban.py b/homeassistant/components/http/ban.py index 89d927ee8af..c56dd6c343b 100644 --- a/homeassistant/components/http/ban.py +++ b/homeassistant/components/http/ban.py @@ -243,5 +243,6 @@ class IpBanManager: async def async_add_ban(self, remote_addr: IPv4Address | IPv6Address) -> None: """Add a new IP address to the banned list.""" - new_ban = 
self.ip_bans_lookup[remote_addr] = IpBan(remote_addr) - await self.hass.async_add_executor_job(self._add_ban, new_ban) + if remote_addr not in self.ip_bans_lookup: + new_ban = self.ip_bans_lookup[remote_addr] = IpBan(remote_addr) + await self.hass.async_add_executor_job(self._add_ban, new_ban) diff --git a/homeassistant/components/http/manifest.json b/homeassistant/components/http/manifest.json index f2f8b51665a..c68ecd79d5f 100644 --- a/homeassistant/components/http/manifest.json +++ b/homeassistant/components/http/manifest.json @@ -8,7 +8,7 @@ "quality_scale": "internal", "requirements": [ "aiohttp_cors==0.7.0", - "aiohttp-fast-url-dispatcher==0.1.0", + "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-zlib-ng==0.1.1" ] } diff --git a/homeassistant/components/http/view.py b/homeassistant/components/http/view.py index 7481381bbc8..1be3d761a3b 100644 --- a/homeassistant/components/http/view.py +++ b/homeassistant/components/http/view.py @@ -20,7 +20,6 @@ import voluptuous as vol from homeassistant import exceptions from homeassistant.const import CONTENT_TYPE_JSON from homeassistant.core import Context, HomeAssistant, is_callback -from homeassistant.helpers.aiohttp_compat import enable_compression from homeassistant.helpers.json import ( find_paths_unserializable_data, json_bytes, @@ -72,8 +71,9 @@ class HomeAssistantView: content_type=CONTENT_TYPE_JSON, status=int(status_code), headers=headers, + zlib_executor_size=32768, ) - enable_compression(response) + response.enable_compression() return response def json_message( diff --git a/homeassistant/components/huawei_lte/__init__.py b/homeassistant/components/huawei_lte/__init__.py index 929ca0193af..d8c939e5c3a 100644 --- a/homeassistant/components/huawei_lte/__init__.py +++ b/homeassistant/components/huawei_lte/__init__.py @@ -35,6 +35,7 @@ from homeassistant.const import ( CONF_RECIPIENT, CONF_URL, CONF_USERNAME, + CONF_VERIFY_SSL, EVENT_HOMEASSISTANT_STOP, Platform, ) @@ -50,6 +51,7 @@ from 
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.issue_registry import IssueSeverity, create_issue from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType @@ -57,6 +59,8 @@ from .const import ( ADMIN_SERVICES, ALL_KEYS, ATTR_CONFIG_ENTRY_ID, + BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS, + BUTTON_KEY_RESTART, CONF_MANUFACTURER, CONF_UNAUTHENTICATED_MODE, CONNECTION_TIMEOUT, @@ -86,7 +90,7 @@ from .const import ( SERVICE_SUSPEND_INTEGRATION, UPDATE_SIGNAL, ) -from .utils import get_device_macs +from .utils import get_device_macs, non_verifying_requests_session _LOGGER = logging.getLogger(__name__) @@ -127,6 +131,7 @@ SERVICE_SCHEMA = vol.Schema({vol.Optional(CONF_URL): cv.url}) PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR, Platform.SWITCH, @@ -302,10 +307,11 @@ class Router: """Log out router session.""" try: self.client.user.logout() - except ResponseErrorNotSupportedException: - _LOGGER.debug("Logout not supported by device", exc_info=True) - except ResponseErrorLoginRequiredException: - _LOGGER.debug("Logout not supported when not logged in", exc_info=True) + except ( + ResponseErrorLoginRequiredException, + ResponseErrorNotSupportedException, + ): + pass # Ok, normal, nothing to do except Exception: # pylint: disable=broad-except _LOGGER.warning("Logout error", exc_info=True) @@ -331,16 +337,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def _connect() -> Connection: """Set up a connection.""" + kwargs: dict[str, Any] = { + "timeout": CONNECTION_TIMEOUT, + } + if url.startswith("https://") and not entry.data.get(CONF_VERIFY_SSL): + kwargs["requests_session"] = 
non_verifying_requests_session(url) if entry.options.get(CONF_UNAUTHENTICATED_MODE): _LOGGER.debug("Connecting in unauthenticated mode, reduced feature set") - connection = Connection(url, timeout=CONNECTION_TIMEOUT) + connection = Connection(url, **kwargs) else: _LOGGER.debug("Connecting in authenticated mode, full feature set") username = entry.data.get(CONF_USERNAME) or "" password = entry.data.get(CONF_PASSWORD) or "" - connection = Connection( - url, username=username, password=password, timeout=CONNECTION_TIMEOUT - ) + connection = Connection(url, username=username, password=password, **kwargs) return connection try: @@ -524,12 +533,38 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return if service.service == SERVICE_CLEAR_TRAFFIC_STATISTICS: + create_issue( + hass, + DOMAIN, + "service_clear_traffic_statistics_moved_to_button", + breaks_in_ha_version="2024.2.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="service_changed_to_button", + translation_placeholders={ + "service": service.service, + "button": BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS, + }, + ) if router.suspended: _LOGGER.debug("%s: ignored, integration suspended", service.service) return result = router.client.monitoring.set_clear_traffic() _LOGGER.debug("%s: %s", service.service, result) elif service.service == SERVICE_REBOOT: + create_issue( + hass, + DOMAIN, + "service_reboot_moved_to_button", + breaks_in_ha_version="2024.2.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="service_changed_to_button", + translation_placeholders={ + "service": service.service, + "button": BUTTON_KEY_RESTART, + }, + ) if router.suspended: _LOGGER.debug("%s: ignored, integration suspended", service.service) return diff --git a/homeassistant/components/huawei_lte/button.py b/homeassistant/components/huawei_lte/button.py new file mode 100644 index 00000000000..f494836e80d --- /dev/null +++ b/homeassistant/components/huawei_lte/button.py @@ 
-0,0 +1,97 @@ +"""Huawei LTE buttons.""" + +from __future__ import annotations + +import logging + +from huawei_lte_api.enums.device import ControlModeEnum + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_platform + +from . import HuaweiLteBaseEntityWithDevice +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: entity_platform.AddEntitiesCallback, +) -> None: + """Set up Huawei LTE buttons.""" + router = hass.data[DOMAIN].routers[config_entry.entry_id] + buttons = [ + ClearTrafficStatisticsButton(router), + RestartButton(router), + ] + async_add_entities(buttons) + + +class BaseButton(HuaweiLteBaseEntityWithDevice, ButtonEntity): + """Huawei LTE button base class.""" + + @property + def _device_unique_id(self) -> str: + """Return unique ID for entity within a router.""" + return f"button-{self.entity_description.key}" + + async def async_update(self) -> None: + """Update is not necessary for button entities.""" + + def press(self) -> None: + """Press button.""" + if self.router.suspended: + _LOGGER.debug( + "%s: ignored, integration suspended", self.entity_description.key + ) + return + result = self._press() + _LOGGER.debug("%s: %s", self.entity_description.key, result) + + def _press(self) -> str: + """Invoke low level action of button press.""" + raise NotImplementedError + + +BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS = "clear_traffic_statistics" + + +class ClearTrafficStatisticsButton(BaseButton): + """Huawei LTE clear traffic statistics button.""" + + entity_description = ButtonEntityDescription( + key=BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS, + name="Clear traffic statistics", + 
entity_category=EntityCategory.CONFIG, + ) + + def _press(self) -> str: + """Call clear traffic statistics endpoint.""" + return self.router.client.monitoring.set_clear_traffic() + + +BUTTON_KEY_RESTART = "restart" + + +class RestartButton(BaseButton): + """Huawei LTE restart button.""" + + entity_description = ButtonEntityDescription( + key=BUTTON_KEY_RESTART, + name="Restart", + device_class=ButtonDeviceClass.RESTART, + entity_category=EntityCategory.CONFIG, + ) + + def _press(self) -> str: + """Call restart endpoint.""" + return self.router.client.device.set_control(ControlModeEnum.REBOOT) diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 6d7b0b9bb11..c97c8d6367b 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -16,7 +16,7 @@ from huawei_lte_api.exceptions import ( ResponseErrorException, ) from huawei_lte_api.Session import GetResponseType -from requests.exceptions import Timeout +from requests.exceptions import SSLError, Timeout from url_normalize import url_normalize import voluptuous as vol @@ -29,6 +29,7 @@ from homeassistant.const import ( CONF_RECIPIENT, CONF_URL, CONF_USERNAME, + CONF_VERIFY_SSL, ) from homeassistant.core import callback from homeassistant.data_entry_flow import FlowResult @@ -44,7 +45,7 @@ from .const import ( DEFAULT_UNAUTHENTICATED_MODE, DOMAIN, ) -from .utils import get_device_macs +from .utils import get_device_macs, non_verifying_requests_session _LOGGER = logging.getLogger(__name__) @@ -80,6 +81,13 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self.context.get(CONF_URL, ""), ), ): str, + vol.Optional( + CONF_VERIFY_SSL, + default=user_input.get( + CONF_VERIFY_SSL, + False, + ), + ): bool, vol.Optional( CONF_USERNAME, default=user_input.get(CONF_USERNAME) or "" ): str, @@ -119,11 +127,20 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): 
password = user_input.get(CONF_PASSWORD) or "" def _get_connection() -> Connection: + if ( + user_input[CONF_URL].startswith("https://") + and not user_input[CONF_VERIFY_SSL] + ): + requests_session = non_verifying_requests_session(user_input[CONF_URL]) + else: + requests_session = None + return Connection( url=user_input[CONF_URL], username=username, password=password, timeout=CONNECTION_TIMEOUT, + requests_session=requests_session, ) conn = None @@ -140,6 +157,12 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): except ResponseErrorException: _LOGGER.warning("Response error", exc_info=True) errors["base"] = "response_error" + except SSLError: + _LOGGER.warning("SSL error", exc_info=True) + if user_input[CONF_VERIFY_SSL]: + errors[CONF_URL] = "ssl_error_try_unverified" + else: + errors[CONF_URL] = "ssl_error_try_plain" except Timeout: _LOGGER.warning("Connection timeout", exc_info=True) errors[CONF_URL] = "connection_timeout" @@ -152,6 +175,7 @@ class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): def _disconnect(conn: Connection) -> None: try: conn.close() + conn.requests_session.close() except Exception: # pylint: disable=broad-except _LOGGER.debug("Disconnect error", exc_info=True) diff --git a/homeassistant/components/huawei_lte/const.py b/homeassistant/components/huawei_lte/const.py index 53cc0efb919..eba0f3ce90b 100644 --- a/homeassistant/components/huawei_lte/const.py +++ b/homeassistant/components/huawei_lte/const.py @@ -79,3 +79,6 @@ ALL_KEYS = ( | SWITCH_KEYS | {KEY_DEVICE_BASIC_INFORMATION} ) + +BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS = "clear_traffic_statistics" +BUTTON_KEY_RESTART = "restart" diff --git a/homeassistant/components/huawei_lte/manifest.json b/homeassistant/components/huawei_lte/manifest.json index d563bed4d46..9a44024111c 100644 --- a/homeassistant/components/huawei_lte/manifest.json +++ b/homeassistant/components/huawei_lte/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": 
["huawei_lte_api.Session"], "requirements": [ - "huawei-lte-api==1.6.11", + "huawei-lte-api==1.7.3", "stringcase==1.2.0", "url-normalize==1.4.3" ], diff --git a/homeassistant/components/huawei_lte/sensor.py b/homeassistant/components/huawei_lte/sensor.py index 07486297b32..ca3734bb305 100644 --- a/homeassistant/components/huawei_lte/sensor.py +++ b/homeassistant/components/huawei_lte/sensor.py @@ -8,8 +8,6 @@ from datetime import datetime, timedelta import logging import re -from huawei_lte_api.enums.net import NetworkModeEnum - from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -575,10 +573,6 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = { "State": HuaweiSensorEntityDescription( key="State", translation_key="operator_search_mode", - format_fn=lambda x: ( - {"0": "Auto", "1": "Manual"}.get(x), - None, - ), entity_category=EntityCategory.DIAGNOSTIC, ), }, @@ -588,19 +582,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = { descriptions={ "NetworkMode": HuaweiSensorEntityDescription( key="NetworkMode", - translation_key="preferred_mode", - format_fn=lambda x: ( - { - NetworkModeEnum.MODE_AUTO.value: "4G/3G/2G", - NetworkModeEnum.MODE_4G_3G_AUTO.value: "4G/3G", - NetworkModeEnum.MODE_4G_2G_AUTO.value: "4G/2G", - NetworkModeEnum.MODE_4G_ONLY.value: "4G", - NetworkModeEnum.MODE_3G_2G_AUTO.value: "3G/2G", - NetworkModeEnum.MODE_3G_ONLY.value: "3G", - NetworkModeEnum.MODE_2G_ONLY.value: "2G", - }.get(x), - None, - ), + translation_key="preferred_network_mode", entity_category=EntityCategory.DIAGNOSTIC, ), }, @@ -718,10 +700,6 @@ class HuaweiLteSensor(HuaweiLteBaseEntityWithDevice, SensorEntity): _unit: str | None = field(default=None, init=False) _last_reset: datetime | None = field(default=None, init=False) - def __post_init__(self) -> None: - """Initialize remaining attributes.""" - self._attr_name = self.entity_description.name or self.item - async def async_added_to_hass(self) -> None: """Subscribe to needed data on add.""" 
await super().async_added_to_hass() diff --git a/homeassistant/components/huawei_lte/strings.json b/homeassistant/components/huawei_lte/strings.json index f188eb9e17b..754f192e57e 100644 --- a/homeassistant/components/huawei_lte/strings.json +++ b/homeassistant/components/huawei_lte/strings.json @@ -14,6 +14,8 @@ "invalid_url": "Invalid URL", "login_attempts_exceeded": "Maximum login attempts exceeded, please try again later", "response_error": "Unknown error from device", + "ssl_error_try_plain": "HTTPS error, please try a plain HTTP URL", + "ssl_error_try_unverified": "HTTPS error, please try disabling certificate verification or a plain HTTP URL", "unknown": "[%key:common::config_flow::error::unknown%]" }, "flow_title": "{name}", @@ -30,7 +32,8 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "url": "[%key:common::config_flow::data::url%]", - "username": "[%key:common::config_flow::data::username%]" + "username": "[%key:common::config_flow::data::username%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "description": "Enter device access details.", "title": "Configure Huawei LTE" @@ -228,10 +231,23 @@ "name": "Operator code" }, "operator_search_mode": { - "name": "Operator search mode" + "name": "Operator search mode", + "state": { + "0": "Auto", + "1": "Manual" + } }, - "preferred_mode": { - "name": "Preferred mode" + "preferred_network_mode": { + "name": "Preferred network mode", + "state": { + "00": "4G/3G/2G auto", + "0302": "4G/3G auto", + "0301": "4G/2G auto", + "03": "4G only", + "0201": "3G/2G auto", + "02": "3G only", + "01": "2G only" + } }, "sms_deleted_device": { "name": "SMS deleted (device)" @@ -279,6 +295,12 @@ } } }, + "issues": { + "service_changed_to_button": { + "title": "Service changed to a button", + "description": "The {service} service is deprecated, use the corresponding {button} button instead." 
+ } + }, "services": { "clear_traffic_statistics": { "name": "Clear traffic statistics", diff --git a/homeassistant/components/huawei_lte/utils.py b/homeassistant/components/huawei_lte/utils.py index 172e8658928..df212a1c25d 100644 --- a/homeassistant/components/huawei_lte/utils.py +++ b/homeassistant/components/huawei_lte/utils.py @@ -2,8 +2,13 @@ from __future__ import annotations from contextlib import suppress +import re +from urllib.parse import urlparse +import warnings from huawei_lte_api.Session import GetResponseType +import requests +from urllib3.exceptions import InsecureRequestWarning from homeassistant.helpers.device_registry import format_mac @@ -25,3 +30,18 @@ def get_device_macs( macs.extend(x.get("WifiMac") for x in wlan_settings["Ssids"]["Ssid"]) return sorted({format_mac(str(x)) for x in macs if x}) + + +def non_verifying_requests_session(url: str) -> requests.Session: + """Get requests.Session that does not verify HTTPS, filter warnings about it.""" + parsed_url = urlparse(url) + assert parsed_url.hostname + requests_session = requests.Session() + requests_session.verify = False + warnings.filterwarnings( + "ignore", + message=rf"^.*\b{re.escape(parsed_url.hostname)}\b", + category=InsecureRequestWarning, + module=r"^urllib3\.connectionpool$", + ) + return requests_session diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index 4022c61bc36..122cb489d26 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -5,12 +5,18 @@ "title": "Pick Hue bridge", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Hue bridge." } }, "manual": { "title": "Manual configure a Hue bridge", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your Hue bridge." 
} }, "link": { diff --git a/homeassistant/components/hunterdouglas_powerview/select.py b/homeassistant/components/hunterdouglas_powerview/select.py index 37d1193e0e5..151b3a58011 100644 --- a/homeassistant/components/hunterdouglas_powerview/select.py +++ b/homeassistant/components/hunterdouglas_powerview/select.py @@ -116,5 +116,6 @@ class PowerViewSelect(ShadeEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change the selected option.""" await self.entity_description.select_fn(self._shade, option) - await self._shade.refresh() # force update data to ensure new info is in coordinator + # force update data to ensure new info is in coordinator + await self._shade.refresh() self.async_write_ha_state() diff --git a/homeassistant/components/hvv_departures/binary_sensor.py b/homeassistant/components/hvv_departures/binary_sensor.py index 0ec08e9c791..8337921acf6 100644 --- a/homeassistant/components/hvv_departures/binary_sensor.py +++ b/homeassistant/components/hvv_departures/binary_sensor.py @@ -125,13 +125,29 @@ class HvvDepartureBinarySensor(CoordinatorEntity, BinarySensorEntity): _attr_attribution = ATTRIBUTION _attr_has_entity_name = True + _attr_device_class = BinarySensorDeviceClass.PROBLEM def __init__(self, coordinator, idx, config_entry): """Initialize.""" super().__init__(coordinator) self.coordinator = coordinator self.idx = idx - self.config_entry = config_entry + + self._attr_name = coordinator.data[idx]["name"] + self._attr_unique_id = idx + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={ + ( + DOMAIN, + config_entry.entry_id, + config_entry.data[CONF_STATION]["id"], + config_entry.data[CONF_STATION]["type"], + ) + }, + manufacturer=MANUFACTURER, + name=f"Departures at {config_entry.data[CONF_STATION]['name']}", + ) @property def is_on(self): @@ -146,38 +162,6 @@ class HvvDepartureBinarySensor(CoordinatorEntity, BinarySensorEntity): and self.coordinator.data[self.idx]["available"] 
) - @property - def device_info(self): - """Return the device info for this sensor.""" - return DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={ - ( - DOMAIN, - self.config_entry.entry_id, - self.config_entry.data[CONF_STATION]["id"], - self.config_entry.data[CONF_STATION]["type"], - ) - }, - manufacturer=MANUFACTURER, - name=f"Departures at {self.config_entry.data[CONF_STATION]['name']}", - ) - - @property - def name(self): - """Return the name of the sensor.""" - return self.coordinator.data[self.idx]["name"] - - @property - def unique_id(self): - """Return a unique ID to use for this sensor.""" - return self.idx - - @property - def device_class(self): - """Return the class of this device, from component DEVICE_CLASSES.""" - return BinarySensorDeviceClass.PROBLEM - @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" diff --git a/homeassistant/components/hvv_departures/sensor.py b/homeassistant/components/hvv_departures/sensor.py index 76a7966a6ed..a8efb663c90 100644 --- a/homeassistant/components/hvv_departures/sensor.py +++ b/homeassistant/components/hvv_departures/sensor.py @@ -73,6 +73,19 @@ class HVVDepartureSensor(SensorEntity): station_id = config_entry.data[CONF_STATION]["id"] station_type = config_entry.data[CONF_STATION]["type"] self._attr_unique_id = f"{config_entry.entry_id}-{station_id}-{station_type}" + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={ + ( + DOMAIN, + config_entry.entry_id, + config_entry.data[CONF_STATION]["id"], + config_entry.data[CONF_STATION]["type"], + ) + }, + manufacturer=MANUFACTURER, + name=config_entry.data[CONF_STATION]["name"], + ) @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self, **kwargs: Any) -> None: @@ -165,20 +178,3 @@ class HVVDepartureSensor(SensorEntity): } ) self._attr_extra_state_attributes[ATTR_NEXT] = departures - - @property - def device_info(self): - """Return the device info for 
this sensor.""" - return DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={ - ( - DOMAIN, - self.config_entry.entry_id, - self.config_entry.data[CONF_STATION]["id"], - self.config_entry.data[CONF_STATION]["type"], - ) - }, - manufacturer=MANUFACTURER, - name=self.config_entry.data[CONF_STATION]["name"], - ) diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index ddff1954eb3..9f44d47ecf6 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -51,7 +51,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Hydrawise from a config entry.""" access_token = config_entry.data[CONF_API_KEY] - hydrawise = legacy.LegacyHydrawise(access_token, load_on_init=False) + hydrawise = legacy.LegacyHydrawiseAsync(access_token) coordinator = HydrawiseDataUpdateCoordinator(hass, hydrawise, SCAN_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 1953e413672..65355a1829f 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -1,7 +1,7 @@ """Support for Hydrawise sprinkler binary sensors.""" from __future__ import annotations -from pydrawise.legacy import LegacyHydrawise +from pydrawise.schema import Zone import voluptuous as vol from homeassistant.components.binary_sensor import ( @@ -69,26 +69,16 @@ async def async_setup_entry( coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][ config_entry.entry_id ] - hydrawise: LegacyHydrawise = coordinator.api - - entities = [ - HydrawiseBinarySensor( - data=hydrawise.current_controller, - coordinator=coordinator, - 
description=BINARY_SENSOR_STATUS, - device_id_key="controller_id", + entities = [] + for controller in coordinator.data.controllers: + entities.append( + HydrawiseBinarySensor(coordinator, BINARY_SENSOR_STATUS, controller) ) - ] - - # create a sensor for each zone - for zone in hydrawise.relays: - for description in BINARY_SENSOR_TYPES: - entities.append( - HydrawiseBinarySensor( - data=zone, coordinator=coordinator, description=description + for zone in controller.zones: + for description in BINARY_SENSOR_TYPES: + entities.append( + HydrawiseBinarySensor(coordinator, description, controller, zone) ) - ) - async_add_entities(entities) @@ -100,5 +90,5 @@ class HydrawiseBinarySensor(HydrawiseEntity, BinarySensorEntity): if self.entity_description.key == "status": self._attr_is_on = self.coordinator.last_update_success elif self.entity_description.key == "is_watering": - relay_data = self.coordinator.api.relays_by_zone_number[self.data["relay"]] - self._attr_is_on = relay_data["timestr"] == "Now" + zone: Zone = self.zone + self._attr_is_on = zone.scheduled_runs.current_run is not None diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index c4b37fb4a06..72df86606d7 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -5,8 +5,8 @@ from __future__ import annotations from collections.abc import Callable from typing import Any +from aiohttp import ClientError from pydrawise import legacy -from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from homeassistant import config_entries @@ -27,20 +27,17 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self, api_key: str, *, on_failure: Callable[[str], FlowResult] ) -> FlowResult: """Create the config entry.""" + api = legacy.LegacyHydrawiseAsync(api_key) try: - api = await self.hass.async_add_executor_job( - legacy.LegacyHydrawise, api_key - ) - except 
ConnectTimeout: + # Skip fetching zones to save on metered API calls. + user = await api.get_user(fetch_zones=False) + except TimeoutError: return on_failure("timeout_connect") - except HTTPError as ex: + except ClientError as ex: LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) return on_failure("cannot_connect") - if not api.status: - return on_failure("unknown") - - await self.async_set_unique_id(f"hydrawise-{api.customer_id}") + await self.async_set_unique_id(f"hydrawise-{user.customer_id}") self._abort_if_unique_id_configured() return self.async_create_entry(title="Hydrawise", data={CONF_API_KEY: api_key}) diff --git a/homeassistant/components/hydrawise/coordinator.py b/homeassistant/components/hydrawise/coordinator.py index 007b15d2403..412108f859f 100644 --- a/homeassistant/components/hydrawise/coordinator.py +++ b/homeassistant/components/hydrawise/coordinator.py @@ -4,26 +4,25 @@ from __future__ import annotations from datetime import timedelta -from pydrawise.legacy import LegacyHydrawise +from pydrawise import HydrawiseBase +from pydrawise.schema import User from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, LOGGER -class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[None]): +class HydrawiseDataUpdateCoordinator(DataUpdateCoordinator[User]): """The Hydrawise Data Update Coordinator.""" def __init__( - self, hass: HomeAssistant, api: LegacyHydrawise, scan_interval: timedelta + self, hass: HomeAssistant, api: HydrawiseBase, scan_interval: timedelta ) -> None: """Initialize HydrawiseDataUpdateCoordinator.""" super().__init__(hass, LOGGER, name=DOMAIN, update_interval=scan_interval) self.api = api - async def _async_update_data(self) -> None: + async def _async_update_data(self) -> User: """Fetch the latest data from Hydrawise.""" - result = await 
self.hass.async_add_executor_job(self.api.update_controller_info) - if not result: - raise UpdateFailed("Failed to refresh Hydrawise data") + return await self.api.get_user() diff --git a/homeassistant/components/hydrawise/entity.py b/homeassistant/components/hydrawise/entity.py index 38fde322673..c707690ce95 100644 --- a/homeassistant/components/hydrawise/entity.py +++ b/homeassistant/components/hydrawise/entity.py @@ -1,7 +1,7 @@ """Base classes for Hydrawise entities.""" from __future__ import annotations -from typing import Any +from pydrawise.schema import Controller, Zone from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo @@ -20,23 +20,25 @@ class HydrawiseEntity(CoordinatorEntity[HydrawiseDataUpdateCoordinator]): def __init__( self, - *, - data: dict[str, Any], coordinator: HydrawiseDataUpdateCoordinator, description: EntityDescription, - device_id_key: str = "relay_id", + controller: Controller, + zone: Zone | None = None, ) -> None: """Initialize the Hydrawise entity.""" super().__init__(coordinator=coordinator) - self.data = data self.entity_description = description - self._device_id = str(data.get(device_id_key)) + self.controller = controller + self.zone = zone + self._device_id = str(controller.id if zone is None else zone.id) self._attr_unique_id = f"{self._device_id}_{description.key}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._device_id)}, - name=data["name"], + name=controller.name if zone is None else zone.name, manufacturer=MANUFACTURER, ) + if zone is not None: + self._attr_device_info["via_device"] = (DOMAIN, str(controller.id)) self._update_attrs() def _update_attrs(self) -> None: diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index 369e952c1be..79a318f778f 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -1,6 +1,9 @@ """Support for Hydrawise sprinkler 
sensors.""" from __future__ import annotations +from datetime import datetime + +from pydrawise.schema import Zone import voluptuous as vol from homeassistant.components.sensor import ( @@ -71,27 +74,30 @@ async def async_setup_entry( coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][ config_entry.entry_id ] - entities = [ - HydrawiseSensor(data=zone, coordinator=coordinator, description=description) - for zone in coordinator.api.relays + async_add_entities( + HydrawiseSensor(coordinator, description, controller, zone) + for controller in coordinator.data.controllers + for zone in controller.zones for description in SENSOR_TYPES - ] - async_add_entities(entities) + ) class HydrawiseSensor(HydrawiseEntity, SensorEntity): """A sensor implementation for Hydrawise device.""" + zone: Zone + def _update_attrs(self) -> None: """Update state attributes.""" - relay_data = self.coordinator.api.relays_by_zone_number[self.data["relay"]] if self.entity_description.key == "watering_time": - if relay_data["timestr"] == "Now": - self._attr_native_value = int(relay_data["run"] / 60) + if (current_run := self.zone.scheduled_runs.current_run) is not None: + self._attr_native_value = int( + current_run.remaining_time.total_seconds() / 60 + ) else: self._attr_native_value = 0 - else: # _sensor_type == 'next_cycle' - next_cycle = min(relay_data["time"], TWO_YEAR_SECONDS) - self._attr_native_value = dt_util.utc_from_timestamp( - dt_util.as_timestamp(dt_util.now()) + next_cycle - ) + elif self.entity_description.key == "next_cycle": + if (next_run := self.zone.scheduled_runs.next_run) is not None: + self._attr_native_value = dt_util.as_utc(next_run.start_time) + else: + self._attr_native_value = datetime.max.replace(tzinfo=dt_util.UTC) diff --git a/homeassistant/components/hydrawise/switch.py b/homeassistant/components/hydrawise/switch.py index 2aa4ecc085b..5dd79d4a13e 100644 --- a/homeassistant/components/hydrawise/switch.py +++ 
b/homeassistant/components/hydrawise/switch.py @@ -1,8 +1,10 @@ """Support for Hydrawise cloud switches.""" from __future__ import annotations +from datetime import timedelta from typing import Any +from pydrawise.schema import Zone import voluptuous as vol from homeassistant.components.switch import ( @@ -17,6 +19,7 @@ from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util import dt as dt_util from .const import ( ALLOWED_WATERING_TIME, @@ -76,62 +79,44 @@ async def async_setup_entry( coordinator: HydrawiseDataUpdateCoordinator = hass.data[DOMAIN][ config_entry.entry_id ] - default_watering_timer = DEFAULT_WATERING_TIME - - entities = [ - HydrawiseSwitch( - data=zone, - coordinator=coordinator, - description=description, - default_watering_timer=default_watering_timer, - ) - for zone in coordinator.api.relays + async_add_entities( + HydrawiseSwitch(coordinator, description, controller, zone) + for controller in coordinator.data.controllers + for zone in controller.zones for description in SWITCH_TYPES - ] - - async_add_entities(entities) + ) class HydrawiseSwitch(HydrawiseEntity, SwitchEntity): """A switch implementation for Hydrawise device.""" - def __init__( - self, - *, - data: dict[str, Any], - coordinator: HydrawiseDataUpdateCoordinator, - description: SwitchEntityDescription, - default_watering_timer: int, - ) -> None: - """Initialize a switch for Hydrawise device.""" - super().__init__(data=data, coordinator=coordinator, description=description) - self._default_watering_timer = default_watering_timer + zone: Zone - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" - zone_number = self.data["relay"] if self.entity_description.key == "manual_watering": - 
self.coordinator.api.run_zone(self._default_watering_timer, zone_number) + await self.coordinator.api.start_zone( + self.zone, custom_run_duration=DEFAULT_WATERING_TIME + ) elif self.entity_description.key == "auto_watering": - self.coordinator.api.suspend_zone(0, zone_number) + await self.coordinator.api.resume_zone(self.zone) self._attr_is_on = True self.async_write_ha_state() - def turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the device off.""" - zone_number = self.data["relay"] if self.entity_description.key == "manual_watering": - self.coordinator.api.run_zone(0, zone_number) + await self.coordinator.api.stop_zone(self.zone) elif self.entity_description.key == "auto_watering": - self.coordinator.api.suspend_zone(365, zone_number) + await self.coordinator.api.suspend_zone( + self.zone, dt_util.now() + timedelta(days=365) + ) self._attr_is_on = False self.async_write_ha_state() def _update_attrs(self) -> None: """Update state attributes.""" - zone_number = self.data["relay"] - timestr = self.coordinator.api.relays_by_zone_number[zone_number]["timestr"] if self.entity_description.key == "manual_watering": - self._attr_is_on = timestr == "Now" + self._attr_is_on = self.zone.scheduled_runs.current_run is not None elif self.entity_description.key == "auto_watering": - self._attr_is_on = timestr not in {"", "Now"} + self._attr_is_on = self.zone.status.suspended_until is None diff --git a/homeassistant/components/hyperion/strings.json b/homeassistant/components/hyperion/strings.json index a2f8838e2ea..8d7e3751c4c 100644 --- a/homeassistant/components/hyperion/strings.json +++ b/homeassistant/components/hyperion/strings.json @@ -5,6 +5,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Hyperion server." 
} }, "auth": { diff --git a/homeassistant/components/ialarm/strings.json b/homeassistant/components/ialarm/strings.json index 1ac7a25e6f8..cb2c75d74a9 100644 --- a/homeassistant/components/ialarm/strings.json +++ b/homeassistant/components/ialarm/strings.json @@ -5,6 +5,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of Antifurto365 iAlarm system." } } }, diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index 0a17ebec96c..5e112aa39f7 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -24,7 +24,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN -PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.COVER] +PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) @@ -44,6 +44,7 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): super().__init__(hass, logger, name=name) self._address = address self._expected_connected = False + self._connection_lost = False self.desk = Desk(self.async_set_updated_data) @@ -63,6 +64,7 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): """Disconnect from desk.""" _LOGGER.debug("Disconnecting from %s", self._address) self._expected_connected = False + self._connection_lost = False await self.desk.disconnect() @callback @@ -71,7 +73,11 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): if self._expected_connected: if not self.desk.is_connected: _LOGGER.debug("Desk disconnected. 
Reconnecting") + self._connection_lost = True self.hass.async_create_task(self.async_connect()) + elif self._connection_lost: + _LOGGER.info("Reconnected to desk") + self._connection_lost = False elif self.desk.is_connected: _LOGGER.warning("Desk is connected but should not be. Disconnecting") self.hass.async_create_task(self.desk.disconnect()) diff --git a/homeassistant/components/idasen_desk/cover.py b/homeassistant/components/idasen_desk/cover.py index 3148616d182..1daebe52420 100644 --- a/homeassistant/components/idasen_desk/cover.py +++ b/homeassistant/components/idasen_desk/cover.py @@ -3,6 +3,8 @@ from __future__ import annotations from typing import Any +from bleak.exc import BleakError + from homeassistant.components.cover import ( ATTR_POSITION, CoverDeviceClass, @@ -12,6 +14,7 @@ from homeassistant.components.cover import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_NAME from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -71,19 +74,33 @@ class IdasenDeskCover(CoordinatorEntity[IdasenDeskCoordinator], CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - await self._desk.move_down() + try: + await self._desk.move_down() + except BleakError as err: + raise HomeAssistantError("Failed to move down: Bluetooth error") from err async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - await self._desk.move_up() + try: + await self._desk.move_up() + except BleakError as err: + raise HomeAssistantError("Failed to move up: Bluetooth error") from err async def async_stop_cover(self, **kwargs: Any) -> None: """Stop the cover.""" - await self._desk.stop() + try: + await self._desk.stop() + 
except BleakError as err: + raise HomeAssistantError("Failed to stop moving: Bluetooth error") from err async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover shutter to a specific position.""" - await self._desk.move_to(int(kwargs[ATTR_POSITION])) + try: + await self._desk.move_to(int(kwargs[ATTR_POSITION])) + except BleakError as err: + raise HomeAssistantError( + "Failed to move to specified position: Bluetooth error" + ) from err @callback def _handle_coordinator_update(self, *args: Any) -> None: diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index ed941f4f87d..0a96a976bb3 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -11,5 +11,6 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "iot_class": "local_push", - "requirements": ["idasen-ha==2.3"] + "quality_scale": "silver", + "requirements": ["idasen-ha==2.4"] } diff --git a/homeassistant/components/idasen_desk/sensor.py b/homeassistant/components/idasen_desk/sensor.py new file mode 100644 index 00000000000..b67dec0f579 --- /dev/null +++ b/homeassistant/components/idasen_desk/sensor.py @@ -0,0 +1,100 @@ +"""Representation of Idasen Desk sensors.""" +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant import config_entries +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfLength +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import DeskData, IdasenDeskCoordinator +from .const import DOMAIN + + +@dataclass +class IdasenDeskSensorDescriptionMixin: + """Required values for IdasenDesk sensors.""" + + value_fn: Callable[[IdasenDeskCoordinator], float | None] + + +@dataclass +class IdasenDeskSensorDescription( + SensorEntityDescription, + IdasenDeskSensorDescriptionMixin, +): + """Class describing IdasenDesk sensor entities.""" + + +SENSORS = ( + IdasenDeskSensorDescription( + key="height", + translation_key="height", + icon="mdi:arrow-up-down", + native_unit_of_measurement=UnitOfLength.METERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + suggested_display_precision=3, + value_fn=lambda coordinator: coordinator.desk.height, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: config_entries.ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Idasen Desk sensors.""" + data: DeskData = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + IdasenDeskSensor( + data.address, data.device_info, data.coordinator, sensor_description + ) + for sensor_description in SENSORS + ) + + +class IdasenDeskSensor(CoordinatorEntity[IdasenDeskCoordinator], SensorEntity): + """IdasenDesk sensor.""" + + entity_description: IdasenDeskSensorDescription + _attr_has_entity_name = True + + def __init__( + self, + address: str, + device_info: DeviceInfo, + coordinator: IdasenDeskCoordinator, + description: IdasenDeskSensorDescription, + ) -> None: + """Initialize the IdasenDesk sensor entity.""" + super().__init__(coordinator) + self.entity_description = description + + self._attr_unique_id = f"{description.key}-{address}" + self._attr_device_info = device_info + self._address = address + + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + @callback + def 
_handle_coordinator_update(self, *args: Any) -> None: + """Handle data update.""" + self._attr_native_value = self.entity_description.value_fn(self.coordinator) + super()._handle_coordinator_update() diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 6b9bf80edfc..446ef93e542 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -19,5 +19,12 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "No unconfigured devices found. Make sure that the desk is in Bluetooth pairing mode. Enter pairing mode by pressing the small button with the Bluetooth logo on the controller for about 3 seconds, until it starts blinking." } + }, + "entity": { + "sensor": { + "height": { + "name": "Height" + } + } } } diff --git a/homeassistant/components/imap/__init__.py b/homeassistant/components/imap/__init__.py index 3914e0c52c1..fea2583a27a 100644 --- a/homeassistant/components/imap/__init__.py +++ b/homeassistant/components/imap/__init__.py @@ -66,8 +66,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): coordinator: ImapPushDataUpdateCoordinator | ImapPollingDataUpdateCoordinator = hass.data[ DOMAIN - ].pop( - entry.entry_id - ) + ].pop(entry.entry_id) await coordinator.shutdown() return unload_ok diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index 59c24b11e51..34286ce49fa 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -6,6 +6,7 @@ from collections.abc import Mapping from datetime import datetime, timedelta import email from email.header import decode_header, make_header +from email.message import Message from email.utils import parseaddr, 
parsedate_to_datetime import logging from typing import Any @@ -96,8 +97,9 @@ async def connect_to_server(data: Mapping[str, Any]) -> IMAP4_SSL: class ImapMessage: """Class to parse an RFC822 email message.""" - def __init__(self, raw_message: bytes) -> None: + def __init__(self, raw_message: bytes, charset: str = "utf-8") -> None: """Initialize IMAP message.""" + self._charset = charset self.email_message = email.message_from_bytes(raw_message) @property @@ -157,18 +159,30 @@ class ImapMessage: message_html: str | None = None message_untyped_text: str | None = None + def _decode_payload(part: Message) -> str: + """Try to decode text payloads. + + Common text encodings are quoted-printable or base64. + Falls back to the raw content part if decoding fails. + """ + try: + return str(part.get_payload(decode=True).decode(self._charset)) + except ValueError: + return str(part.get_payload()) + + part: Message for part in self.email_message.walk(): if part.get_content_type() == CONTENT_TYPE_TEXT_PLAIN: if message_text is None: - message_text = part.get_payload() + message_text = _decode_payload(part) elif part.get_content_type() == "text/html": if message_html is None: - message_html = part.get_payload() + message_html = _decode_payload(part) elif ( part.get_content_type().startswith("text") and message_untyped_text is None ): - message_untyped_text = part.get_payload() + message_untyped_text = str(part.get_payload()) if message_text is not None: return message_text @@ -223,7 +237,9 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): """Send a event for the last message if the last message was changed.""" response = await self.imap_client.fetch(last_message_uid, "BODY.PEEK[]") if response.result == "OK": - message = ImapMessage(response.lines[1]) + message = ImapMessage( + response.lines[1], charset=self.config_entry.data[CONF_CHARSET] + ) # Set `initial` to `False` if the last message is triggered again initial: bool = True if (message_id := 
message.message_id) == self._last_message_id: diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index f5bafd935a0..36e977f6db0 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -38,6 +38,7 @@ from .schemas import ( add_x10_device, build_device_override_schema, build_hub_schema, + build_plm_manual_schema, build_plm_schema, build_remove_override_schema, build_remove_x10_schema, @@ -46,6 +47,7 @@ from .schemas import ( from .utils import async_get_usb_ports STEP_PLM = "plm" +STEP_PLM_MANUALLY = "plm_manually" STEP_HUB_V1 = "hubv1" STEP_HUB_V2 = "hubv2" STEP_CHANGE_HUB_CONFIG = "change_hub_config" @@ -55,6 +57,7 @@ STEP_ADD_OVERRIDE = "add_override" STEP_REMOVE_OVERRIDE = "remove_override" STEP_REMOVE_X10 = "remove_x10" MODEM_TYPE = "modem_type" +PLM_MANUAL = "manual" _LOGGER = logging.getLogger(__name__) @@ -129,16 +132,35 @@ class InsteonFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Set up the PLM modem type.""" errors = {} if user_input is not None: + if user_input[CONF_DEVICE] == PLM_MANUAL: + return await self.async_step_plm_manually() if await _async_connect(**user_input): return self.async_create_entry(title="", data=user_input) errors["base"] = "cannot_connect" schema_defaults = user_input if user_input is not None else {} ports = await async_get_usb_ports(self.hass) + if not ports: + return await self.async_step_plm_manually() + ports[PLM_MANUAL] = "Enter manually" data_schema = build_plm_schema(ports, **schema_defaults) return self.async_show_form( step_id=STEP_PLM, data_schema=data_schema, errors=errors ) + async def async_step_plm_manually(self, user_input=None): + """Set up the PLM modem type manually.""" + errors = {} + schema_defaults = {} + if user_input is not None: + if await _async_connect(**user_input): + return self.async_create_entry(title="", data=user_input) + errors["base"] = "cannot_connect" + 
schema_defaults = user_input + data_schema = build_plm_manual_schema(**schema_defaults) + return self.async_show_form( + step_id=STEP_PLM_MANUALLY, data_schema=data_schema, errors=errors + ) + async def async_step_hubv1(self, user_input=None): """Set up the Hub v1 modem type.""" return await self._async_setup_hub(hub_version=1, user_input=user_input) diff --git a/homeassistant/components/insteon/manifest.json b/homeassistant/components/insteon/manifest.json index 5fa45a16fb6..1d4eee4a058 100644 --- a/homeassistant/components/insteon/manifest.json +++ b/homeassistant/components/insteon/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["pyinsteon", "pypubsub"], "requirements": [ - "pyinsteon==1.5.1", + "pyinsteon==1.5.2", "insteon-frontend-home-assistant==0.4.0" ], "usb": [ diff --git a/homeassistant/components/insteon/schemas.py b/homeassistant/components/insteon/schemas.py index e6b22a8cbb9..497af743195 100644 --- a/homeassistant/components/insteon/schemas.py +++ b/homeassistant/components/insteon/schemas.py @@ -195,6 +195,11 @@ def build_plm_schema(ports: dict[str, str], device=vol.UNDEFINED): return vol.Schema({vol.Required(CONF_DEVICE, default=device): vol.In(ports)}) +def build_plm_manual_schema(device=vol.UNDEFINED): + """Build the manual PLM schema for config flow.""" + return vol.Schema({vol.Required(CONF_DEVICE, default=device): str}) + + def build_hub_schema( hub_version, host=vol.UNDEFINED, diff --git a/homeassistant/components/iotawatt/strings.json b/homeassistant/components/iotawatt/strings.json index f21dfe0cd09..266b32c5c31 100644 --- a/homeassistant/components/iotawatt/strings.json +++ b/homeassistant/components/iotawatt/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your IoTaWatt device." 
} }, "auth": { diff --git a/homeassistant/components/ipp/sensor.py b/homeassistant/components/ipp/sensor.py index 3bc7035e26b..a2cb5cd34dc 100644 --- a/homeassistant/components/ipp/sensor.py +++ b/homeassistant/components/ipp/sensor.py @@ -12,6 +12,7 @@ from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_LOCATION, PERCENTAGE, EntityCategory @@ -119,6 +120,7 @@ async def async_setup_entry( name=marker.name, icon="mdi:water", native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, attributes_fn=_get_marker_attributes_fn( index, lambda marker: { diff --git a/homeassistant/components/islamic_prayer_times/coordinator.py b/homeassistant/components/islamic_prayer_times/coordinator.py index 161ce7b2644..aedaf43411a 100644 --- a/homeassistant/components/islamic_prayer_times/coordinator.py +++ b/homeassistant/components/islamic_prayer_times/coordinator.py @@ -77,6 +77,7 @@ class IslamicPrayerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, datetim midnightMode=self.midnight_mode, school=self.school, date=str(dt_util.now().date()), + iso8601=True, ) return cast(dict[str, Any], calc.fetch_prayer_times()) @@ -145,9 +146,12 @@ class IslamicPrayerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, datetim async_call_later(self.hass, 60, self.async_request_update) raise UpdateFailed from err + # introduced in prayer-times-calculator 0.0.8 + prayer_times.pop("date", None) + prayer_times_info: dict[str, datetime] = {} for prayer, time in prayer_times.items(): - if prayer_time := dt_util.parse_datetime(f"{dt_util.now().date()} {time}"): + if prayer_time := dt_util.parse_datetime(time): prayer_times_info[prayer] = dt_util.as_utc(prayer_time) self.async_schedule_future_update(prayer_times_info["Midnight"]) diff --git a/homeassistant/components/keenetic_ndms2/strings.json 
b/homeassistant/components/keenetic_ndms2/strings.json index 13e3fabfbff..765a3fc4d47 100644 --- a/homeassistant/components/keenetic_ndms2/strings.json +++ b/homeassistant/components/keenetic_ndms2/strings.json @@ -9,6 +9,9 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Keenetic router." } } }, diff --git a/homeassistant/components/kmtronic/strings.json b/homeassistant/components/kmtronic/strings.json index 2a3a3a40687..6cecea12f22 100644 --- a/homeassistant/components/kmtronic/strings.json +++ b/homeassistant/components/kmtronic/strings.json @@ -6,6 +6,9 @@ "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your KMtronic device." } } }, diff --git a/homeassistant/components/kodi/strings.json b/homeassistant/components/kodi/strings.json index 51431b317d6..7c7d53b33ac 100644 --- a/homeassistant/components/kodi/strings.json +++ b/homeassistant/components/kodi/strings.json @@ -8,6 +8,9 @@ "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "ssl": "[%key:common::config_flow::data::ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of the system hosting your Kodi server." 
} }, "discovery_confirm": { diff --git a/homeassistant/components/komfovent/__init__.py b/homeassistant/components/komfovent/__init__.py deleted file mode 100644 index 0366a429b21..00000000000 --- a/homeassistant/components/komfovent/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -"""The Komfovent integration.""" -from __future__ import annotations - -import komfovent_api - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady - -from .const import DOMAIN - -PLATFORMS: list[Platform] = [Platform.CLIMATE] - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Komfovent from a config entry.""" - host = entry.data[CONF_HOST] - username = entry.data[CONF_USERNAME] - password = entry.data[CONF_PASSWORD] - _, credentials = komfovent_api.get_credentials(host, username, password) - result, settings = await komfovent_api.get_settings(credentials) - if result != komfovent_api.KomfoventConnectionResult.SUCCESS: - raise ConfigEntryNotReady(f"Unable to connect to {host}: {result}") - - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (credentials, settings) - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/komfovent/climate.py b/homeassistant/components/komfovent/climate.py deleted file mode 100644 index 2e51fddf4f2..00000000000 --- a/homeassistant/components/komfovent/climate.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Ventilation Units from Komfovent integration.""" -from __future__ import annotations - -import komfovent_api - -from homeassistant.components.climate import ( - ClimateEntity, - 
ClimateEntityFeature, - HVACMode, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfTemperature -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import DOMAIN - -HASS_TO_KOMFOVENT_MODES = { - HVACMode.COOL: komfovent_api.KomfoventModes.COOL, - HVACMode.HEAT_COOL: komfovent_api.KomfoventModes.HEAT_COOL, - HVACMode.OFF: komfovent_api.KomfoventModes.OFF, - HVACMode.AUTO: komfovent_api.KomfoventModes.AUTO, -} -KOMFOVENT_TO_HASS_MODES = {v: k for k, v in HASS_TO_KOMFOVENT_MODES.items()} - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Komfovent unit control.""" - credentials, settings = hass.data[DOMAIN][entry.entry_id] - async_add_entities([KomfoventDevice(credentials, settings)], True) - - -class KomfoventDevice(ClimateEntity): - """Representation of a ventilation unit.""" - - _attr_hvac_modes = list(HASS_TO_KOMFOVENT_MODES.keys()) - _attr_preset_modes = [mode.name for mode in komfovent_api.KomfoventPresets] - _attr_supported_features = ClimateEntityFeature.PRESET_MODE - _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_has_entity_name = True - _attr_name = None - - def __init__( - self, - credentials: komfovent_api.KomfoventCredentials, - settings: komfovent_api.KomfoventSettings, - ) -> None: - """Initialize the ventilation unit.""" - self._komfovent_credentials = credentials - self._komfovent_settings = settings - - self._attr_unique_id = settings.serial_number - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, settings.serial_number)}, - model=settings.model, - name=settings.name, - serial_number=settings.serial_number, - sw_version=settings.version, - manufacturer="Komfovent", - ) - - async def async_set_preset_mode(self, preset_mode: str) -> None: - 
"""Set new target preset mode.""" - await komfovent_api.set_preset( - self._komfovent_credentials, - komfovent_api.KomfoventPresets[preset_mode], - ) - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set new target hvac mode.""" - await komfovent_api.set_mode( - self._komfovent_credentials, HASS_TO_KOMFOVENT_MODES[hvac_mode] - ) - - async def async_update(self) -> None: - """Get the latest data.""" - result, status = await komfovent_api.get_unit_status( - self._komfovent_credentials - ) - if result != komfovent_api.KomfoventConnectionResult.SUCCESS or not status: - self._attr_available = False - return - self._attr_available = True - self._attr_preset_mode = status.preset - self._attr_current_temperature = status.temp_extract - self._attr_hvac_mode = KOMFOVENT_TO_HASS_MODES[status.mode] diff --git a/homeassistant/components/komfovent/config_flow.py b/homeassistant/components/komfovent/config_flow.py deleted file mode 100644 index fb5390a30c6..00000000000 --- a/homeassistant/components/komfovent/config_flow.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Config flow for Komfovent integration.""" -from __future__ import annotations - -import logging -from typing import Any - -import komfovent_api -import voluptuous as vol - -from homeassistant import config_entries -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME -from homeassistant.data_entry_flow import FlowResult - -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - -STEP_USER = "user" -STEP_USER_DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): str, - vol.Optional(CONF_USERNAME, default="user"): str, - vol.Required(CONF_PASSWORD): str, - } -) - -ERRORS_MAP = { - komfovent_api.KomfoventConnectionResult.NOT_FOUND: "cannot_connect", - komfovent_api.KomfoventConnectionResult.UNAUTHORISED: "invalid_auth", - komfovent_api.KomfoventConnectionResult.INVALID_INPUT: "invalid_input", -} - - -class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): - 
"""Handle a config flow for Komfovent.""" - - VERSION = 1 - - def __return_error( - self, result: komfovent_api.KomfoventConnectionResult - ) -> FlowResult: - return self.async_show_form( - step_id=STEP_USER, - data_schema=STEP_USER_DATA_SCHEMA, - errors={"base": ERRORS_MAP.get(result, "unknown")}, - ) - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> FlowResult: - """Handle the initial step.""" - if user_input is None: - return self.async_show_form( - step_id=STEP_USER, data_schema=STEP_USER_DATA_SCHEMA - ) - - conf_host = user_input[CONF_HOST] - conf_username = user_input[CONF_USERNAME] - conf_password = user_input[CONF_PASSWORD] - - result, credentials = komfovent_api.get_credentials( - conf_host, conf_username, conf_password - ) - if result != komfovent_api.KomfoventConnectionResult.SUCCESS: - return self.__return_error(result) - - result, settings = await komfovent_api.get_settings(credentials) - if result != komfovent_api.KomfoventConnectionResult.SUCCESS: - return self.__return_error(result) - - await self.async_set_unique_id(settings.serial_number) - self._abort_if_unique_id_configured() - - return self.async_create_entry(title=settings.name, data=user_input) diff --git a/homeassistant/components/komfovent/const.py b/homeassistant/components/komfovent/const.py deleted file mode 100644 index a7881a58c41..00000000000 --- a/homeassistant/components/komfovent/const.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Constants for the Komfovent integration.""" - -DOMAIN = "komfovent" diff --git a/homeassistant/components/komfovent/manifest.json b/homeassistant/components/komfovent/manifest.json deleted file mode 100644 index cbe00ef8dc5..00000000000 --- a/homeassistant/components/komfovent/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "komfovent", - "name": "Komfovent", - "codeowners": ["@ProstoSanja"], - "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/komfovent", - "iot_class": 
"local_polling", - "requirements": ["komfovent-api==0.0.3"] -} diff --git a/homeassistant/components/kostal_plenticore/helper.py b/homeassistant/components/kostal_plenticore/helper.py index 1c495ac9db9..adb1bfb6f09 100644 --- a/homeassistant/components/kostal_plenticore/helper.py +++ b/homeassistant/components/kostal_plenticore/helper.py @@ -3,13 +3,18 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Callable +from collections.abc import Callable, Mapping from datetime import datetime, timedelta import logging from typing import Any, TypeVar, cast from aiohttp.client_exceptions import ClientError -from pykoplenti import ApiClient, ApiException, AuthenticationException +from pykoplenti import ( + ApiClient, + ApiException, + AuthenticationException, + ExtendedApiClient, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.core import CALLBACK_TYPE, HomeAssistant @@ -51,7 +56,9 @@ class Plenticore: async def async_setup(self) -> bool: """Set up Plenticore API client.""" - self._client = ApiClient(async_get_clientsession(self.hass), host=self.host) + self._client = ExtendedApiClient( + async_get_clientsession(self.hass), host=self.host + ) try: await self._client.login(self.config_entry.data[CONF_PASSWORD]) except AuthenticationException as err: @@ -124,7 +131,7 @@ class DataUpdateCoordinatorMixin: async def async_read_data( self, module_id: str, data_id: str - ) -> dict[str, dict[str, str]] | None: + ) -> Mapping[str, Mapping[str, str]] | None: """Read data from Plenticore.""" if (client := self._plenticore.client) is None: return None @@ -190,7 +197,7 @@ class PlenticoreUpdateCoordinator(DataUpdateCoordinator[_DataT]): class ProcessDataUpdateCoordinator( - PlenticoreUpdateCoordinator[dict[str, dict[str, str]]] + PlenticoreUpdateCoordinator[Mapping[str, Mapping[str, str]]] ): """Implementation of PlenticoreUpdateCoordinator for process data.""" @@ 
-206,18 +213,19 @@ class ProcessDataUpdateCoordinator( return { module_id: { process_data.id: process_data.value - for process_data in fetched_data[module_id] + for process_data in fetched_data[module_id].values() } for module_id in fetched_data } class SettingDataUpdateCoordinator( - PlenticoreUpdateCoordinator[dict[str, dict[str, str]]], DataUpdateCoordinatorMixin + PlenticoreUpdateCoordinator[Mapping[str, Mapping[str, str]]], + DataUpdateCoordinatorMixin, ): """Implementation of PlenticoreUpdateCoordinator for settings data.""" - async def _async_update_data(self) -> dict[str, dict[str, str]]: + async def _async_update_data(self) -> Mapping[str, Mapping[str, str]]: client = self._plenticore.client if not self._fetch or client is None: diff --git a/homeassistant/components/kostal_plenticore/manifest.json b/homeassistant/components/kostal_plenticore/manifest.json index 95f4a194977..d65368e7ee4 100644 --- a/homeassistant/components/kostal_plenticore/manifest.json +++ b/homeassistant/components/kostal_plenticore/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/kostal_plenticore", "iot_class": "local_polling", "loggers": ["kostal"], - "requirements": ["pykoplenti==1.0.0"] + "requirements": ["pykoplenti==1.2.2"] } diff --git a/homeassistant/components/kostal_plenticore/sensor.py b/homeassistant/components/kostal_plenticore/sensor.py index f7bad638df4..ce18867511d 100644 --- a/homeassistant/components/kostal_plenticore/sensor.py +++ b/homeassistant/components/kostal_plenticore/sensor.py @@ -649,6 +649,39 @@ SENSOR_PROCESS_DATA = [ state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), + PlenticoreSensorEntityDescription( + module_id="scb:statistic:EnergyFlow", + key="Statistic:EnergyDischarge:Day", + name="Battery Discharge Day", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + 
module_id="scb:statistic:EnergyFlow", + key="Statistic:EnergyDischarge:Month", + name="Battery Discharge Month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + module_id="scb:statistic:EnergyFlow", + key="Statistic:EnergyDischarge:Year", + name="Battery Discharge Year", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + module_id="scb:statistic:EnergyFlow", + key="Statistic:EnergyDischarge:Total", + name="Battery Discharge Total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + formatter="format_energy", + ), PlenticoreSensorEntityDescription( module_id="scb:statistic:EnergyFlow", key="Statistic:EnergyDischargeGrid:Day", @@ -682,6 +715,52 @@ SENSOR_PROCESS_DATA = [ state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), + PlenticoreSensorEntityDescription( + module_id="_virt_", + key="pv_P", + name="Sum power of all PV DC inputs", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + entity_registry_enabled_default=True, + state_class=SensorStateClass.MEASUREMENT, + formatter="format_round", + ), + PlenticoreSensorEntityDescription( + module_id="_virt_", + key="Statistic:EnergyGrid:Total", + name="Energy to Grid Total", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + module_id="_virt_", + key="Statistic:EnergyGrid:Year", + name="Energy to Grid Year", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + 
formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + module_id="_virt_", + key="Statistic:EnergyGrid:Month", + name="Energy to Grid Month", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + formatter="format_energy", + ), + PlenticoreSensorEntityDescription( + module_id="_virt_", + key="Statistic:EnergyGrid:Day", + name="Energy to Grid Day", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + formatter="format_energy", + ), ] diff --git a/homeassistant/components/kraken/sensor.py b/homeassistant/components/kraken/sensor.py index a6c00e62b62..21eb3f2e5a1 100644 --- a/homeassistant/components/kraken/sensor.py +++ b/homeassistant/components/kraken/sensor.py @@ -259,7 +259,8 @@ class KrakenSensor( return try: self._attr_native_value = self.entity_description.value_fn( - self.coordinator, self.tracked_asset_pair_wsname # type: ignore[arg-type] + self.coordinator, # type: ignore[arg-type] + self.tracked_asset_pair_wsname, ) self._received_data_at_least_once = True except KeyError: diff --git a/homeassistant/components/lacrosse/sensor.py b/homeassistant/components/lacrosse/sensor.py index 7355a60f5f0..40d38da55eb 100644 --- a/homeassistant/components/lacrosse/sensor.py +++ b/homeassistant/components/lacrosse/sensor.py @@ -209,7 +209,7 @@ class LaCrosseHumidity(LaCrosseSensor): _attr_native_unit_of_measurement = PERCENTAGE _attr_state_class = SensorStateClass.MEASUREMENT - _attr_icon = "mdi:water-percent" + _attr_device_class = SensorDeviceClass.HUMIDITY @property def native_value(self) -> int | None: diff --git a/homeassistant/components/lametric/button.py b/homeassistant/components/lametric/button.py index 18a0c2f8f72..1de8c1d1717 100644 --- a/homeassistant/components/lametric/button.py +++ b/homeassistant/components/lametric/button.py @@ -19,20 +19,13 @@ 
from .entity import LaMetricEntity from .helpers import lametric_exception_handler -@dataclass -class LaMetricButtonEntityDescriptionMixin: - """Mixin values for LaMetric entities.""" +@dataclass(kw_only=True) +class LaMetricButtonEntityDescription(ButtonEntityDescription): + """Class describing LaMetric button entities.""" press_fn: Callable[[LaMetricDevice], Awaitable[Any]] -@dataclass -class LaMetricButtonEntityDescription( - ButtonEntityDescription, LaMetricButtonEntityDescriptionMixin -): - """Class describing LaMetric button entities.""" - - BUTTONS = [ LaMetricButtonEntityDescription( key="app_next", diff --git a/homeassistant/components/lametric/number.py b/homeassistant/components/lametric/number.py index da458cab61e..d8c70494264 100644 --- a/homeassistant/components/lametric/number.py +++ b/homeassistant/components/lametric/number.py @@ -19,21 +19,14 @@ from .entity import LaMetricEntity from .helpers import lametric_exception_handler -@dataclass -class LaMetricEntityDescriptionMixin: - """Mixin values for LaMetric entities.""" +@dataclass(kw_only=True) +class LaMetricNumberEntityDescription(NumberEntityDescription): + """Class describing LaMetric number entities.""" value_fn: Callable[[Device], int | None] set_value_fn: Callable[[LaMetricDevice, float], Awaitable[Any]] -@dataclass -class LaMetricNumberEntityDescription( - NumberEntityDescription, LaMetricEntityDescriptionMixin -): - """Class describing LaMetric number entities.""" - - NUMBERS = [ LaMetricNumberEntityDescription( key="brightness", diff --git a/homeassistant/components/lametric/select.py b/homeassistant/components/lametric/select.py index b7c0e55745e..f15147235ac 100644 --- a/homeassistant/components/lametric/select.py +++ b/homeassistant/components/lametric/select.py @@ -19,21 +19,14 @@ from .entity import LaMetricEntity from .helpers import lametric_exception_handler -@dataclass -class LaMetricEntityDescriptionMixin: - """Mixin values for LaMetric entities.""" +@dataclass(kw_only=True) 
+class LaMetricSelectEntityDescription(SelectEntityDescription): + """Class describing LaMetric select entities.""" current_fn: Callable[[Device], str] select_fn: Callable[[LaMetricDevice, str], Awaitable[Any]] -@dataclass -class LaMetricSelectEntityDescription( - SelectEntityDescription, LaMetricEntityDescriptionMixin -): - """Class describing LaMetric select entities.""" - - SELECTS = [ LaMetricSelectEntityDescription( key="brightness_mode", diff --git a/homeassistant/components/lametric/sensor.py b/homeassistant/components/lametric/sensor.py index 6cddf81b2bf..88d461e9d4f 100644 --- a/homeassistant/components/lametric/sensor.py +++ b/homeassistant/components/lametric/sensor.py @@ -21,20 +21,13 @@ from .coordinator import LaMetricDataUpdateCoordinator from .entity import LaMetricEntity -@dataclass -class LaMetricEntityDescriptionMixin: - """Mixin values for LaMetric entities.""" +@dataclass(kw_only=True) +class LaMetricSensorEntityDescription(SensorEntityDescription): + """Class describing LaMetric sensor entities.""" value_fn: Callable[[Device], int | None] -@dataclass -class LaMetricSensorEntityDescription( - SensorEntityDescription, LaMetricEntityDescriptionMixin -): - """Class describing LaMetric sensor entities.""" - - SENSORS = [ LaMetricSensorEntityDescription( key="rssi", diff --git a/homeassistant/components/lametric/switch.py b/homeassistant/components/lametric/switch.py index c33ec16d617..ace492fe0cb 100644 --- a/homeassistant/components/lametric/switch.py +++ b/homeassistant/components/lametric/switch.py @@ -19,21 +19,13 @@ from .entity import LaMetricEntity from .helpers import lametric_exception_handler -@dataclass -class LaMetricEntityDescriptionMixin: - """Mixin values for LaMetric entities.""" - - is_on_fn: Callable[[Device], bool] - set_fn: Callable[[LaMetricDevice, bool], Awaitable[Any]] - - -@dataclass -class LaMetricSwitchEntityDescription( - SwitchEntityDescription, LaMetricEntityDescriptionMixin -): +@dataclass(kw_only=True) +class 
LaMetricSwitchEntityDescription(SwitchEntityDescription): """Class describing LaMetric switch entities.""" available_fn: Callable[[Device], bool] = lambda device: True + is_on_fn: Callable[[Device], bool] + set_fn: Callable[[LaMetricDevice, bool], Awaitable[Any]] SWITCHES = [ diff --git a/homeassistant/components/landisgyr_heat_meter/sensor.py b/homeassistant/components/landisgyr_heat_meter/sensor.py index 8ef81e899b7..d7485e88fb0 100644 --- a/homeassistant/components/landisgyr_heat_meter/sensor.py +++ b/homeassistant/components/landisgyr_heat_meter/sensor.py @@ -316,7 +316,9 @@ class HeatMeterSensor( """Set up the sensor with the initial values.""" super().__init__(coordinator) self.key = description.key - self._attr_unique_id = f"{coordinator.config_entry.data['device_number']}_{description.key}" # type: ignore[union-attr] + self._attr_unique_id = ( + f"{coordinator.config_entry.data['device_number']}_{description.key}" # type: ignore[union-attr] + ) self._attr_name = f"Heat Meter {description.name}" self.entity_description = description self._attr_device_info = device diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index 7996376b6ac..a90b5a71c2d 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.14.0", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.16.0", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 21543ad6788..ca46565b773 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -32,5 +32,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": 
"https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.14.0", "led-ble==1.0.1"] + "requirements": ["bluetooth-data-tools==1.16.0", "led-ble==1.0.1"] } diff --git a/homeassistant/components/lg_soundbar/strings.json b/homeassistant/components/lg_soundbar/strings.json index 8c6a9909ff5..ee16a39350c 100644 --- a/homeassistant/components/lg_soundbar/strings.json +++ b/homeassistant/components/lg_soundbar/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your LG Soundbar." } } }, diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 7cabfd4712f..39412780331 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -29,9 +29,11 @@ "LIFX GU10", "LIFX Lightstrip", "LIFX Mini", + "LIFX Neon", "LIFX Nightvision", "LIFX Pls", "LIFX Plus", + "LIFX String", "LIFX Tile", "LIFX White", "LIFX Z" @@ -40,8 +42,8 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==0.8.10", + "aiolifx==1.0.0", "aiolifx-effects==0.3.2", - "aiolifx-themes==0.4.5" + "aiolifx-themes==0.4.10" ] } diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 78cccde5890..3bb3797c284 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -500,6 +500,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: ) elif ColorMode.XY in supported_color_modes: params[ATTR_XY_COLOR] = color_util.color_hs_to_xy(*hs_color) + elif ColorMode.COLOR_TEMP in supported_color_modes: + xy_color = color_util.color_hs_to_xy(*hs_color) + params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( + *xy_color + ) + params[ATTR_COLOR_TEMP] = 
color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) elif ATTR_RGB_COLOR in params and ColorMode.RGB not in supported_color_modes: assert (rgb_color := params.pop(ATTR_RGB_COLOR)) is not None if ColorMode.RGBW in supported_color_modes: @@ -515,6 +523,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_HS_COLOR] = color_util.color_RGB_to_hs(*rgb_color) elif ColorMode.XY in supported_color_modes: params[ATTR_XY_COLOR] = color_util.color_RGB_to_xy(*rgb_color) + elif ColorMode.COLOR_TEMP in supported_color_modes: + xy_color = color_util.color_RGB_to_xy(*rgb_color) + params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( + *xy_color + ) + params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) elif ATTR_XY_COLOR in params and ColorMode.XY not in supported_color_modes: xy_color = params.pop(ATTR_XY_COLOR) if ColorMode.HS in supported_color_modes: @@ -529,6 +545,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_RGBWW_COLOR] = color_util.color_rgb_to_rgbww( *rgb_color, light.min_color_temp_kelvin, light.max_color_temp_kelvin ) + elif ColorMode.COLOR_TEMP in supported_color_modes: + params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( + *xy_color + ) + params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) elif ATTR_RGBW_COLOR in params and ColorMode.RGBW not in supported_color_modes: rgbw_color = params.pop(ATTR_RGBW_COLOR) rgb_color = color_util.color_rgbw_to_rgb(*rgbw_color) @@ -542,6 +565,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_HS_COLOR] = color_util.color_RGB_to_hs(*rgb_color) elif ColorMode.XY in supported_color_modes: params[ATTR_XY_COLOR] = color_util.color_RGB_to_xy(*rgb_color) + elif ColorMode.COLOR_TEMP in supported_color_modes: + xy_color = 
color_util.color_RGB_to_xy(*rgb_color) + params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( + *xy_color + ) + params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) elif ( ATTR_RGBWW_COLOR in params and ColorMode.RGBWW not in supported_color_modes ): @@ -558,6 +589,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_HS_COLOR] = color_util.color_RGB_to_hs(*rgb_color) elif ColorMode.XY in supported_color_modes: params[ATTR_XY_COLOR] = color_util.color_RGB_to_xy(*rgb_color) + elif ColorMode.COLOR_TEMP in supported_color_modes: + xy_color = color_util.color_RGB_to_xy(*rgb_color) + params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( + *xy_color + ) + params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) # If white is set to True, set it to the light's brightness # Add a warning in Home Assistant Core 2023.5 if the brightness is set to an diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index f055f02ebda..54fcd01843c 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -17,15 +17,10 @@ from homeassistant.core import Context, HomeAssistant, State from . 
import ( ATTR_BRIGHTNESS, - ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, - ATTR_COLOR_NAME, ATTR_COLOR_TEMP, ATTR_EFFECT, - ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, - ATTR_PROFILE, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -40,13 +35,7 @@ _LOGGER = logging.getLogger(__name__) VALID_STATES = {STATE_ON, STATE_OFF} -ATTR_GROUP = [ - ATTR_BRIGHTNESS, - ATTR_BRIGHTNESS_PCT, - ATTR_EFFECT, - ATTR_FLASH, - ATTR_TRANSITION, -] +ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, @@ -55,10 +44,6 @@ COLOR_GROUP = [ ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_XY_COLOR, - # The following color attributes are deprecated - ATTR_PROFILE, - ATTR_COLOR_NAME, - ATTR_KELVIN, ] @@ -79,21 +64,6 @@ COLOR_MODE_TO_ATTRIBUTE = { ColorMode.XY: ColorModeAttr(ATTR_XY_COLOR, ATTR_XY_COLOR), } -DEPRECATED_GROUP = [ - ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_NAME, - ATTR_FLASH, - ATTR_KELVIN, - ATTR_PROFILE, - ATTR_TRANSITION, -] - -DEPRECATION_WARNING = ( - "The use of other attributes than device state attributes is deprecated and will be" - " removed in a future release. Invalid attributes are %s. Read the logs for further" - " details: https://www.home-assistant.io/integrations/scene/" -) - def _color_mode_same(cur_state: State, state: State) -> bool: """Test if color_mode is same.""" @@ -124,11 +94,6 @@ async def _async_reproduce_state( ) return - # Warn if deprecated attributes are used - deprecated_attrs = [attr for attr in state.attributes if attr in DEPRECATED_GROUP] - if deprecated_attrs: - _LOGGER.warning(DEPRECATION_WARNING, deprecated_attrs) - # Return if we are already at the right state. 
if ( cur_state.state == state.state diff --git a/homeassistant/components/light/services.yaml b/homeassistant/components/light/services.yaml index 433da53a570..fb7a1539944 100644 --- a/homeassistant/components/light/services.yaml +++ b/homeassistant/components/light/services.yaml @@ -252,8 +252,9 @@ turn_on: - light.ColorMode.RGBWW selector: color_temp: - min_mireds: 153 - max_mireds: 500 + unit: "mired" + min: 153 + max: 500 kelvin: filter: attribute: @@ -266,11 +267,10 @@ turn_on: - light.ColorMode.RGBWW advanced: true selector: - number: + color_temp: + unit: "kelvin" min: 2000 max: 6500 - step: 100 - unit_of_measurement: K brightness: filter: attribute: @@ -637,11 +637,10 @@ toggle: - light.ColorMode.RGBWW advanced: true selector: - number: + color_temp: + unit: "kelvin" min: 2000 max: 6500 - step: 100 - unit_of_measurement: K brightness: filter: attribute: diff --git a/homeassistant/components/linear_garage_door/__init__.py b/homeassistant/components/linear_garage_door/__init__.py new file mode 100644 index 00000000000..d168da511e0 --- /dev/null +++ b/homeassistant/components/linear_garage_door/__init__.py @@ -0,0 +1,32 @@ +"""The Linear Garage Door integration.""" +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import LinearUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.COVER] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Linear Garage Door from a config entry.""" + + coordinator = LinearUpdateCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a 
config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/linear_garage_door/config_flow.py b/homeassistant/components/linear_garage_door/config_flow.py new file mode 100644 index 00000000000..6bca49adb4c --- /dev/null +++ b/homeassistant/components/linear_garage_door/config_flow.py @@ -0,0 +1,166 @@ +"""Config flow for Linear Garage Door integration.""" +from __future__ import annotations + +from collections.abc import Collection, Mapping, Sequence +import logging +from typing import Any +import uuid + +from linear_garage_door import Linear +from linear_garage_door.errors import InvalidLoginError +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResult +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, +} + + +async def validate_input( + hass: HomeAssistant, + data: dict[str, str], +) -> dict[str, Sequence[Collection[str]]]: + """Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
+ """ + + hub = Linear() + + device_id = str(uuid.uuid4()) + try: + await hub.login( + data["email"], + data["password"], + device_id=device_id, + client_session=async_get_clientsession(hass), + ) + + sites = await hub.get_sites() + except InvalidLoginError as err: + raise InvalidAuth from err + finally: + await hub.close() + + info = { + "email": data["email"], + "password": data["password"], + "sites": sites, + "device_id": device_id, + } + + return info + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Linear Garage Door.""" + + VERSION = 1 + + def __init__(self) -> None: + """Initialize the config flow.""" + self.data: dict[str, Sequence[Collection[str]]] = {} + self._reauth_entry: config_entries.ConfigEntry | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the initial step.""" + data_schema = STEP_USER_DATA_SCHEMA + + data_schema = vol.Schema(data_schema) + + if user_input is None: + return self.async_show_form(step_id="user", data_schema=data_schema) + + errors = {} + + try: + info = await validate_input(self.hass, user_input) + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + self.data = info + + # Check if we are reauthenticating + if self._reauth_entry is not None: + self.hass.config_entries.async_update_entry( + self._reauth_entry, + data=self._reauth_entry.data + | {"email": self.data["email"], "password": self.data["password"]}, + ) + await self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") + + return await self.async_step_site() + + return self.async_show_form( + step_id="user", data_schema=data_schema, errors=errors + ) + + async def async_step_site( + self, + user_input: dict[str, Any] | None = None, + ) -> FlowResult: + 
"""Handle the site step.""" + + if isinstance(self.data["sites"], list): + sites: list[dict[str, str]] = self.data["sites"] + + if not user_input: + return self.async_show_form( + step_id="site", + data_schema=vol.Schema( + { + vol.Required("site"): vol.In( + {site["id"]: site["name"] for site in sites} + ) + } + ), + ) + + site_id = user_input["site"] + + site_name = next(site["name"] for site in sites if site["id"] == site_id) + + await self.async_set_unique_id(site_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=site_name, + data={ + "site_id": site_id, + "email": self.data["email"], + "password": self.data["password"], + "device_id": self.data["device_id"], + }, + ) + + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Reauth in case of a password change or other error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +class InvalidDeviceID(HomeAssistantError): + """Error to indicate there is invalid device ID.""" diff --git a/homeassistant/components/linear_garage_door/const.py b/homeassistant/components/linear_garage_door/const.py new file mode 100644 index 00000000000..7b3625c7c67 --- /dev/null +++ b/homeassistant/components/linear_garage_door/const.py @@ -0,0 +1,3 @@ +"""Constants for the Linear Garage Door integration.""" + +DOMAIN = "linear_garage_door" diff --git a/homeassistant/components/linear_garage_door/coordinator.py b/homeassistant/components/linear_garage_door/coordinator.py new file mode 100644 index 00000000000..5a17d5a39e4 --- /dev/null +++ b/homeassistant/components/linear_garage_door/coordinator.py @@ -0,0 +1,81 @@ +"""DataUpdateCoordinator for Linear.""" +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Any + +from 
linear_garage_door import Linear +from linear_garage_door.errors import InvalidLoginError, ResponseError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """DataUpdateCoordinator for Linear.""" + + _email: str + _password: str + _device_id: str + _site_id: str + _devices: list[dict[str, list[str] | str]] | None + _linear: Linear + + def __init__( + self, + hass: HomeAssistant, + entry: ConfigEntry, + ) -> None: + """Initialize DataUpdateCoordinator for Linear.""" + self._email = entry.data["email"] + self._password = entry.data["password"] + self._device_id = entry.data["device_id"] + self._site_id = entry.data["site_id"] + self._devices = None + + super().__init__( + hass, + _LOGGER, + name="Linear Garage Door", + update_interval=timedelta(seconds=60), + ) + + async def _async_update_data(self) -> dict[str, Any]: + """Get the data for Linear.""" + + linear = Linear() + + try: + await linear.login( + email=self._email, + password=self._password, + device_id=self._device_id, + ) + except InvalidLoginError as err: + if ( + str(err) + == "Login error: Login provided is invalid, please check the email and password" + ): + raise ConfigEntryAuthFailed from err + raise ConfigEntryNotReady from err + except ResponseError as err: + raise ConfigEntryNotReady from err + + if not self._devices: + self._devices = await linear.get_devices(self._site_id) + + data = {} + + for device in self._devices: + device_id = str(device["id"]) + state = await linear.get_device_state(device_id) + data[device_id] = {"name": device["name"], "subdevices": state} + + await linear.close() + + return data diff --git a/homeassistant/components/linear_garage_door/cover.py 
b/homeassistant/components/linear_garage_door/cover.py new file mode 100644 index 00000000000..3474e9d3acb --- /dev/null +++ b/homeassistant/components/linear_garage_door/cover.py @@ -0,0 +1,149 @@ +"""Cover entity for Linear Garage Doors.""" + +from datetime import timedelta +from typing import Any + +from linear_garage_door import Linear + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import LinearUpdateCoordinator + +SUPPORTED_SUBDEVICES = ["GDO"] +PARALLEL_UPDATES = 1 +SCAN_INTERVAL = timedelta(seconds=10) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Linear Garage Door cover.""" + coordinator: LinearUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] + data = coordinator.data + + device_list: list[LinearCoverEntity] = [] + + for device_id in data: + device_list.extend( + LinearCoverEntity( + device_id=device_id, + device_name=data[device_id]["name"], + subdevice=subdev, + config_entry=config_entry, + coordinator=coordinator, + ) + for subdev in data[device_id]["subdevices"] + if subdev in SUPPORTED_SUBDEVICES + ) + async_add_entities(device_list) + + +class LinearCoverEntity(CoordinatorEntity[LinearUpdateCoordinator], CoverEntity): + """Representation of a Linear cover.""" + + _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + + def __init__( + self, + device_id: str, + device_name: str, + subdevice: str, + config_entry: ConfigEntry, + 
coordinator: LinearUpdateCoordinator, + ) -> None: + """Init with device ID and name.""" + super().__init__(coordinator) + + self._attr_has_entity_name = True + self._attr_name = None + self._device_id = device_id + self._device_name = device_name + self._subdevice = subdevice + self._attr_device_class = CoverDeviceClass.GARAGE + self._attr_unique_id = f"{device_id}-{subdevice}" + self._config_entry = config_entry + + def _get_data(self, data_property: str) -> str: + """Get a property of the subdevice.""" + return str( + self.coordinator.data[self._device_id]["subdevices"][self._subdevice].get( + data_property + ) + ) + + @property + def device_info(self) -> DeviceInfo: + """Return device info of a garage door.""" + return DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + name=self._device_name, + manufacturer="Linear", + model="Garage Door Opener", + ) + + @property + def is_closed(self) -> bool: + """Return if cover is closed.""" + return bool(self._get_data("Open_B") == "false") + + @property + def is_opened(self) -> bool: + """Return if cover is open.""" + return bool(self._get_data("Open_B") == "true") + + @property + def is_opening(self) -> bool: + """Return if cover is opening.""" + return bool(self._get_data("Opening_P") == "0") + + @property + def is_closing(self) -> bool: + """Return if cover is closing.""" + return bool(self._get_data("Opening_P") == "100") + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the garage door.""" + if self.is_closed: + return + + linear = Linear() + + await linear.login( + email=self._config_entry.data["email"], + password=self._config_entry.data["password"], + device_id=self._config_entry.data["device_id"], + client_session=async_get_clientsession(self.hass), + ) + + await linear.operate_device(self._device_id, self._subdevice, "Close") + await linear.close() + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the garage door.""" + if self.is_opened: + return + + linear = 
Linear() + + await linear.login( + email=self._config_entry.data["email"], + password=self._config_entry.data["password"], + device_id=self._config_entry.data["device_id"], + client_session=async_get_clientsession(self.hass), + ) + + await linear.operate_device(self._device_id, self._subdevice, "Open") + await linear.close() diff --git a/homeassistant/components/linear_garage_door/diagnostics.py b/homeassistant/components/linear_garage_door/diagnostics.py new file mode 100644 index 00000000000..fffcdd7de87 --- /dev/null +++ b/homeassistant/components/linear_garage_door/diagnostics.py @@ -0,0 +1,26 @@ +"""Diagnostics support for Linear Garage Door.""" +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import LinearUpdateCoordinator + +TO_REDACT = {CONF_PASSWORD, CONF_EMAIL} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator: LinearUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "coordinator_data": coordinator.data, + } diff --git a/homeassistant/components/linear_garage_door/manifest.json b/homeassistant/components/linear_garage_door/manifest.json new file mode 100644 index 00000000000..c7918e21e20 --- /dev/null +++ b/homeassistant/components/linear_garage_door/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "linear_garage_door", + "name": "Linear Garage Door", + "codeowners": ["@IceBotYT"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/linear_garage_door", + "iot_class": "cloud_polling", + "requirements": ["linear-garage-door==0.2.7"] +} diff 
--git a/homeassistant/components/linear_garage_door/strings.json b/homeassistant/components/linear_garage_door/strings.json new file mode 100644 index 00000000000..93dd17c5bce --- /dev/null +++ b/homeassistant/components/linear_garage_door/strings.json @@ -0,0 +1,20 @@ +{ + "config": { + "step": { + "user": { + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + } + } +} diff --git a/homeassistant/components/local_todo/todo.py b/homeassistant/components/local_todo/todo.py index f9832ad8730..c5cf25a8c2e 100644 --- a/homeassistant/components/local_todo/todo.py +++ b/homeassistant/components/local_todo/todo.py @@ -63,9 +63,11 @@ def _todo_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]: """Convert TodoItem dataclass items to dictionary of attributes for ical consumption.""" result: dict[str, str] = {} for name, value in obj: + if value is None: + continue if name == "status": result[name] = ICS_TODO_STATUS_MAP_INV[value] - elif value is not None: + else: result[name] = value return result @@ -88,6 +90,9 @@ class LocalTodoListEntity(TodoListEntity): | TodoListEntityFeature.DELETE_TODO_ITEM | TodoListEntityFeature.UPDATE_TODO_ITEM | TodoListEntityFeature.MOVE_TODO_ITEM + | TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM + | TodoListEntityFeature.SET_DUE_DATE_ON_ITEM + | TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM ) _attr_should_poll = False @@ -113,6 +118,8 @@ class LocalTodoListEntity(TodoListEntity): status=ICS_TODO_STATUS_MAP.get( item.status or TodoStatus.NEEDS_ACTION, TodoItemStatus.NEEDS_ACTION ), + due=item.due, + 
description=item.description, ) for item in self._calendar.todos ] diff --git a/homeassistant/components/lock/__init__.py b/homeassistant/components/lock/__init__.py index 8cbce69dc7c..ed7e2070055 100644 --- a/homeassistant/components/lock/__init__.py +++ b/homeassistant/components/lock/__init__.py @@ -87,40 +87,34 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None: - """Lock the lock.""" - code: str = service_call.data.get( - ATTR_CODE, entity._lock_option_default_code # pylint: disable=protected-access - ) +@callback +def _add_default_code(entity: LockEntity, service_call: ServiceCall) -> dict[Any, Any]: + data = remove_entity_service_fields(service_call) + code: str = data.pop(ATTR_CODE, "") + if not code: + code = entity._lock_option_default_code # pylint: disable=protected-access if entity.code_format_cmp and not entity.code_format_cmp.match(code): raise ValueError( f"Code '{code}' for locking {entity.entity_id} doesn't match pattern {entity.code_format}" ) - await entity.async_lock(**remove_entity_service_fields(service_call)) + if code: + data[ATTR_CODE] = code + return data + + +async def _async_lock(entity: LockEntity, service_call: ServiceCall) -> None: + """Lock the lock.""" + await entity.async_lock(**_add_default_code(entity, service_call)) async def _async_unlock(entity: LockEntity, service_call: ServiceCall) -> None: """Unlock the lock.""" - code: str = service_call.data.get( - ATTR_CODE, entity._lock_option_default_code # pylint: disable=protected-access - ) - if entity.code_format_cmp and not entity.code_format_cmp.match(code): - raise ValueError( - f"Code '{code}' for unlocking {entity.entity_id} doesn't match pattern {entity.code_format}" - ) - await entity.async_unlock(**remove_entity_service_fields(service_call)) + await entity.async_unlock(**_add_default_code(entity, service_call)) async def _async_open(entity: LockEntity, 
service_call: ServiceCall) -> None: """Open the door latch.""" - code: str = service_call.data.get( - ATTR_CODE, entity._lock_option_default_code # pylint: disable=protected-access - ) - if entity.code_format_cmp and not entity.code_format_cmp.match(code): - raise ValueError( - f"Code '{code}' for opening {entity.entity_id} doesn't match pattern {entity.code_format}" - ) - await entity.async_open(**remove_entity_service_fields(service_call)) + await entity.async_open(**_add_default_code(entity, service_call)) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/lookin/__init__.py b/homeassistant/components/lookin/__init__.py index 7656de8d385..37156e9ca08 100644 --- a/homeassistant/components/lookin/__init__.py +++ b/homeassistant/components/lookin/__init__.py @@ -118,7 +118,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: push_coordinator = LookinPushCoordinator(entry.title) if lookin_device.model >= 2: - meteo_coordinator = LookinDataUpdateCoordinator[MeteoSensor]( + coordinator_class = LookinDataUpdateCoordinator[MeteoSensor] + meteo_coordinator = coordinator_class( hass, push_coordinator, name=entry.title, diff --git a/homeassistant/components/lovelace/__init__.py b/homeassistant/components/lovelace/__init__.py index 2c425bec785..daa44bf60be 100644 --- a/homeassistant/components/lovelace/__init__.py +++ b/homeassistant/components/lovelace/__init__.py @@ -4,7 +4,10 @@ import logging import voluptuous as vol from homeassistant.components import frontend, websocket_api -from homeassistant.config import async_hass_config_yaml, async_process_component_config +from homeassistant.config import ( + async_hass_config_yaml, + async_process_component_and_handle_errors, +) from homeassistant.const import CONF_FILENAME, CONF_MODE, CONF_RESOURCES from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError @@ -85,7 
+88,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: integration = await async_get_integration(hass, DOMAIN) - config = await async_process_component_config(hass, conf, integration) + config = await async_process_component_and_handle_errors( + hass, conf, integration + ) if config is None: raise HomeAssistantError("Config validation failed") diff --git a/homeassistant/components/lupusec/binary_sensor.py b/homeassistant/components/lupusec/binary_sensor.py index c98e634dcb3..ee369baf8dd 100644 --- a/homeassistant/components/lupusec/binary_sensor.py +++ b/homeassistant/components/lupusec/binary_sensor.py @@ -27,7 +27,7 @@ def setup_platform( data = hass.data[LUPUSEC_DOMAIN] - device_types = CONST.TYPE_OPENING + device_types = CONST.TYPE_OPENING + CONST.TYPE_SENSOR devices = [] for device in data.lupusec.get_devices(generic_type=device_types): diff --git a/homeassistant/components/matrix/__init__.py b/homeassistant/components/matrix/__init__.py index f9ef3593fe6..ddda50aa8b2 100644 --- a/homeassistant/components/matrix/__init__.py +++ b/homeassistant/components/matrix/__init__.py @@ -348,7 +348,10 @@ class MatrixBot: self._access_tokens[self._mx_id] = token await self.hass.async_add_executor_job( - save_json, self._session_filepath, self._access_tokens, True # private=True + save_json, + self._session_filepath, + self._access_tokens, + True, # private=True ) async def _login(self) -> None: diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index 52b8e905b4b..2831ebe9a38 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -145,9 +145,7 @@ class MatterAdapter: get_clean_name(basic_info.nodeLabel) or get_clean_name(basic_info.productLabel) or get_clean_name(basic_info.productName) - or device_type.__name__ - if device_type - else None + or (device_type.__name__ if device_type else None) ) # handle bridged devices diff --git 
a/homeassistant/components/matter/api.py b/homeassistant/components/matter/api.py index 7b4b7d35b7f..227d0c73e89 100644 --- a/homeassistant/components/matter/api.py +++ b/homeassistant/components/matter/api.py @@ -1,9 +1,9 @@ """Handle websocket api for Matter.""" from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine from functools import wraps -from typing import Any +from typing import Any, Concatenate, ParamSpec from matter_server.common.errors import MatterError import voluptuous as vol @@ -15,6 +15,8 @@ from homeassistant.core import HomeAssistant, callback from .adapter import MatterAdapter from .helpers import get_matter +_P = ParamSpec("_P") + ID = "id" TYPE = "type" @@ -28,12 +30,19 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_set_wifi_credentials) -def async_get_matter_adapter(func: Callable) -> Callable: +def async_get_matter_adapter( + func: Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any], MatterAdapter], + Coroutine[Any, Any, None], + ], +) -> Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None] +]: """Decorate function to get the MatterAdapter.""" @wraps(func) async def _get_matter( - hass: HomeAssistant, connection: ActiveConnection, msg: dict + hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Provide the Matter client to the function.""" matter = get_matter(hass) @@ -43,7 +52,15 @@ def async_get_matter_adapter(func: Callable) -> Callable: return _get_matter -def async_handle_failed_command(func: Callable) -> Callable: +def async_handle_failed_command( + func: Callable[ + Concatenate[HomeAssistant, ActiveConnection, dict[str, Any], _P], + Coroutine[Any, Any, None], + ], +) -> Callable[ + Concatenate[HomeAssistant, ActiveConnection, dict[str, Any], _P], + Coroutine[Any, Any, None], +]: """Decorate function to handle MatterError and send 
relevant error.""" @wraps(func) @@ -51,8 +68,8 @@ def async_handle_failed_command(func: Callable) -> Callable: hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any], - *args: Any, - **kwargs: Any, + *args: _P.args, + **kwargs: _P.kwargs, ) -> None: """Handle MatterError within function and send relevant error.""" try: diff --git a/homeassistant/components/matter/event.py b/homeassistant/components/matter/event.py index 3361c3fa146..e84fcec32d8 100644 --- a/homeassistant/components/matter/event.py +++ b/homeassistant/components/matter/event.py @@ -104,9 +104,11 @@ class MatterEventEntity(MatterEntity, EventEntity): """Call when Node attribute(s) changed.""" @callback - def _on_matter_node_event( - self, event: EventType, data: MatterNodeEvent - ) -> None: # noqa: F821 + def _on_matter_node_event( # noqa: F821 + self, + event: EventType, + data: MatterNodeEvent, + ) -> None: """Call on NodeEvent.""" if data.endpoint_id != self._endpoint.endpoint_id: return diff --git a/homeassistant/components/matter/helpers.py b/homeassistant/components/matter/helpers.py index dcd6a30ee1f..446d5dc3591 100644 --- a/homeassistant/components/matter/helpers.py +++ b/homeassistant/components/matter/helpers.py @@ -94,7 +94,7 @@ def get_node_from_device_entry( ) if device_id_full is None: - raise ValueError(f"Device {device.id} is not a Matter device") + return None device_id = device_id_full.lstrip(device_id_type_prefix) matter_client = matter.matter_client diff --git a/homeassistant/components/matter/manifest.json b/homeassistant/components/matter/manifest.json index 6f494153a97..f350cda9227 100644 --- a/homeassistant/components/matter/manifest.json +++ b/homeassistant/components/matter/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["websocket_api"], "documentation": "https://www.home-assistant.io/integrations/matter", "iot_class": "local_push", - "requirements": ["python-matter-server==4.0.0"] + "requirements": ["python-matter-server==5.0.0"] } diff --git 
a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index e1fb4464b83..61922e8e8c9 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -67,7 +67,15 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSwitch, required_attributes=(clusters.OnOff.Attributes.OnOff,), - # restrict device type to prevent discovery by the wrong platform + device_type=(device_types.OnOffPlugInUnit,), + ), + MatterDiscoverySchema( + platform=Platform.SWITCH, + entity_description=SwitchEntityDescription( + key="MatterSwitch", device_class=SwitchDeviceClass.SWITCH, name=None + ), + entity_class=MatterSwitch, + required_attributes=(clusters.OnOff.Attributes.OnOff,), not_device_type=( device_types.ColorTemperatureLight, device_types.DimmableLight, @@ -76,7 +84,6 @@ DISCOVERY_SCHEMAS = [ device_types.DoorLock, device_types.ColorDimmerSwitch, device_types.DimmerSwitch, - device_types.OnOffLightSwitch, device_types.Thermostat, ), ), diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index d16439800a9..111509c1f31 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -7,5 +7,5 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2023.10.13"] + "requirements": ["yt-dlp==2023.11.16"] } diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index f3ff925a1a4..50365f90f1f 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -1137,8 +1137,7 @@ class MediaPlayerImageView(HomeAssistantView): extra_urls = [ # Need to modify the default regex for media_content_id as it may # include arbitrary characters including '/','{', or '}' - url - + 
"/browse_media/{media_content_type}/{media_content_id:.+}", + url + "/browse_media/{media_content_type}/{media_content_id:.+}", ] def __init__(self, component: EntityComponent[MediaPlayerEntity]) -> None: diff --git a/homeassistant/components/mill/manifest.json b/homeassistant/components/mill/manifest.json index cb0ba4522bf..7bb78eb05e7 100644 --- a/homeassistant/components/mill/manifest.json +++ b/homeassistant/components/mill/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mill", "iot_class": "local_polling", "loggers": ["mill", "mill_local"], - "requirements": ["millheater==0.11.6", "mill-local==0.3.0"] + "requirements": ["millheater==0.11.7", "mill-local==0.3.0"] } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 764cf4930f7..c0474ad75d5 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -435,16 +435,24 @@ class ModbusHub: try: result: ModbusResponse = entry.func(address, value, **kwargs) except ModbusException as exception_error: - self._log_error(str(exception_error)) + error = ( + f"Error: device: {slave} address: {address} -> {str(exception_error)}" + ) + self._log_error(error) return None if not result: - self._log_error("Error: pymodbus returned None") + error = ( + f"Error: device: {slave} address: {address} -> pymodbus returned None" + ) + self._log_error(error) return None if not hasattr(result, entry.attr): - self._log_error(str(result)) + error = f"Error: device: {slave} address: {address} -> {str(result)}" + self._log_error(error) return None if result.isError(): - self._log_error("Error: pymodbus returned isError True") + error = f"Error: device: {slave} address: {address} -> pymodbus returned isError True" + self._log_error(error) return None self._in_error = False return result diff --git a/homeassistant/components/modbus/validators.py b/homeassistant/components/modbus/validators.py index 
fbf56d97b51..52919a24ac7 100644 --- a/homeassistant/components/modbus/validators.py +++ b/homeassistant/components/modbus/validators.py @@ -52,6 +52,12 @@ ENTRY = namedtuple( "validate_parm", ], ) + + +ILLEGAL = "I" +OPTIONAL = "O" +DEMANDED = "D" + PARM_IS_LEGAL = namedtuple( "PARM_IS_LEGAL", [ @@ -62,28 +68,40 @@ PARM_IS_LEGAL = namedtuple( "swap_word", ], ) -# PARM_IS_LEGAL defines if the keywords: -# count: -# structure: -# swap: byte -# swap: word -# swap: word_byte (identical to swap: word) -# are legal to use. -# These keywords are only legal with some datatype: ... -# As expressed in DEFAULT_STRUCT_FORMAT - DEFAULT_STRUCT_FORMAT = { - DataType.INT16: ENTRY("h", 1, PARM_IS_LEGAL(False, False, True, True, False)), - DataType.UINT16: ENTRY("H", 1, PARM_IS_LEGAL(False, False, True, True, False)), - DataType.FLOAT16: ENTRY("e", 1, PARM_IS_LEGAL(False, False, True, True, False)), - DataType.INT32: ENTRY("i", 2, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.UINT32: ENTRY("I", 2, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.FLOAT32: ENTRY("f", 2, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.INT64: ENTRY("q", 4, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.UINT64: ENTRY("Q", 4, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.FLOAT64: ENTRY("d", 4, PARM_IS_LEGAL(False, False, True, True, True)), - DataType.STRING: ENTRY("s", -1, PARM_IS_LEGAL(True, False, False, True, False)), - DataType.CUSTOM: ENTRY("?", 0, PARM_IS_LEGAL(True, True, False, False, False)), + DataType.INT16: ENTRY( + "h", 1, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, ILLEGAL) + ), + DataType.UINT16: ENTRY( + "H", 1, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, ILLEGAL) + ), + DataType.FLOAT16: ENTRY( + "e", 1, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, ILLEGAL) + ), + DataType.INT32: ENTRY( + "i", 2, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.UINT32: ENTRY( + "I", 2, 
PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.FLOAT32: ENTRY( + "f", 2, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.INT64: ENTRY( + "q", 4, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.UINT64: ENTRY( + "Q", 4, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.FLOAT64: ENTRY( + "d", 4, PARM_IS_LEGAL(ILLEGAL, ILLEGAL, OPTIONAL, OPTIONAL, OPTIONAL) + ), + DataType.STRING: ENTRY( + "s", 0, PARM_IS_LEGAL(DEMANDED, ILLEGAL, ILLEGAL, OPTIONAL, ILLEGAL) + ), + DataType.CUSTOM: ENTRY( + "?", 0, PARM_IS_LEGAL(DEMANDED, DEMANDED, ILLEGAL, ILLEGAL, ILLEGAL) + ), } @@ -96,32 +114,37 @@ def struct_validator(config: dict[str, Any]) -> dict[str, Any]: data_type = config[CONF_DATA_TYPE] = DataType.INT16 count = config.get(CONF_COUNT, None) structure = config.get(CONF_STRUCTURE, None) - slave_count = config.get(CONF_SLAVE_COUNT, config.get(CONF_VIRTUAL_COUNT, 0)) + slave_count = config.get(CONF_SLAVE_COUNT, config.get(CONF_VIRTUAL_COUNT)) swap_type = config.get(CONF_SWAP, CONF_SWAP_NONE) validator = DEFAULT_STRUCT_FORMAT[data_type].validate_parm for entry in ( (count, validator.count, CONF_COUNT), (structure, validator.structure, CONF_STRUCTURE), + ( + slave_count, + validator.slave_count, + f"{CONF_VIRTUAL_COUNT} / {CONF_SLAVE_COUNT}", + ), ): - if bool(entry[0]) != entry[1]: - error = "cannot be combined" if not entry[1] else "missing, demanded" + if entry[0] is None: + if entry[1] == DEMANDED: + error = f"{name}: `{entry[2]}:` missing, demanded with `{CONF_DATA_TYPE}: {data_type}`" + raise vol.Invalid(error) + elif entry[1] == ILLEGAL: error = ( - f"{name}: `{entry[2]}:` {error} with `{CONF_DATA_TYPE}: {data_type}`" + f"{name}: `{entry[2]}:` illegal with `{CONF_DATA_TYPE}: {data_type}`" ) raise vol.Invalid(error) - if slave_count and not validator.slave_count: - error = f"{name}: `{CONF_VIRTUAL_COUNT} / {CONF_SLAVE_COUNT}:` cannot be combined with 
`{CONF_DATA_TYPE}: {data_type}`" - raise vol.Invalid(error) if swap_type != CONF_SWAP_NONE: swap_type_validator = { - CONF_SWAP_NONE: False, + CONF_SWAP_NONE: validator.swap_byte, CONF_SWAP_BYTE: validator.swap_byte, CONF_SWAP_WORD: validator.swap_word, CONF_SWAP_WORD_BYTE: validator.swap_word, }[swap_type] - if not swap_type_validator: - error = f"{name}: `{CONF_SWAP}:{swap_type}` cannot be combined with `{CONF_DATA_TYPE}: {data_type}`" + if swap_type_validator == ILLEGAL: + error = f"{name}: `{CONF_SWAP}:{swap_type}` illegal with `{CONF_DATA_TYPE}: {data_type}`" raise vol.Invalid(error) if config[CONF_DATA_TYPE] == DataType.CUSTOM: try: diff --git a/homeassistant/components/mopeka/manifest.json b/homeassistant/components/mopeka/manifest.json index d6b5618bf97..766af715485 100644 --- a/homeassistant/components/mopeka/manifest.json +++ b/homeassistant/components/mopeka/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/mopeka", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mopeka-iot-ble==0.4.1"] + "requirements": ["mopeka-iot-ble==0.5.0"] } diff --git a/homeassistant/components/motion_blinds/sensor.py b/homeassistant/components/motion_blinds/sensor.py index d8dc25e0006..e71abe09069 100644 --- a/homeassistant/components/motion_blinds/sensor.py +++ b/homeassistant/components/motion_blinds/sensor.py @@ -48,6 +48,7 @@ class MotionBatterySensor(MotionCoordinatorEntity, SensorEntity): _attr_device_class = SensorDeviceClass.BATTERY _attr_native_unit_of_measurement = PERCENTAGE + _attr_entity_category = EntityCategory.DIAGNOSTIC def __init__(self, coordinator, blind): """Initialize the Motion Battery Sensor.""" diff --git a/homeassistant/components/mpd/media_player.py b/homeassistant/components/mpd/media_player.py index 8eab83b5d41..9b3adb38e0c 100644 --- a/homeassistant/components/mpd/media_player.py +++ b/homeassistant/components/mpd/media_player.py @@ -1,11 +1,13 @@ """Support to interact with a 
Music Player Daemon.""" from __future__ import annotations -from contextlib import suppress +import asyncio +from contextlib import asynccontextmanager, suppress from datetime import timedelta import hashlib import logging import os +from socket import gaierror from typing import Any import mpd @@ -92,11 +94,11 @@ class MpdDevice(MediaPlayerEntity): self._name = name self.password = password - self._status = None + self._status = {} self._currentsong = None self._playlists = None self._currentplaylist = None - self._is_connected = False + self._is_available = None self._muted = False self._muted_volume = None self._media_position_updated_at = None @@ -104,67 +106,88 @@ class MpdDevice(MediaPlayerEntity): self._media_image_hash = None # Track if the song changed so image doesn't have to be loaded every update. self._media_image_file = None - self._commands = None # set up MPD client self._client = MPDClient() self._client.timeout = 30 - self._client.idletimeout = None + self._client.idletimeout = 10 + self._client_lock = asyncio.Lock() - async def _connect(self): - """Connect to MPD.""" - try: - await self._client.connect(self.server, self.port) - - if self.password is not None: - await self._client.password(self.password) - except mpd.ConnectionError: - return - - self._is_connected = True - - def _disconnect(self): - """Disconnect from MPD.""" - with suppress(mpd.ConnectionError): - self._client.disconnect() - self._is_connected = False - self._status = None - - async def _fetch_status(self): - """Fetch status from MPD.""" - self._status = await self._client.status() - self._currentsong = await self._client.currentsong() - await self._async_update_media_image_hash() - - if (position := self._status.get("elapsed")) is None: - position = self._status.get("time") - - if isinstance(position, str) and ":" in position: - position = position.split(":")[0] - - if position is not None and self._media_position != position: - self._media_position_updated_at = 
dt_util.utcnow() - self._media_position = int(float(position)) - - await self._update_playlists() - - @property - def available(self): - """Return true if MPD is available and connected.""" - return self._is_connected + # Instead of relying on python-mpd2 to maintain a (persistent) connection to + # MPD, the below explicitly sets up a *non*-persistent connection. This is + # done to workaround the issue as described in: + # + @asynccontextmanager + async def connection(self): + """Handle MPD connect and disconnect.""" + async with self._client_lock: + try: + # MPDClient.connect() doesn't always respect its timeout. To + # prevent a deadlock, enforce an additional (slightly longer) + # timeout on the coroutine itself. + try: + async with asyncio.timeout(self._client.timeout + 5): + await self._client.connect(self.server, self.port) + except asyncio.TimeoutError as error: + # TimeoutError has no message (which hinders logging further + # down the line), so provide one. + raise asyncio.TimeoutError( + "Connection attempt timed out" + ) from error + if self.password is not None: + await self._client.password(self.password) + self._is_available = True + yield + except ( + asyncio.TimeoutError, + gaierror, + mpd.ConnectionError, + OSError, + ) as error: + # Log a warning during startup or when previously connected; for + # subsequent errors a debug message is sufficient. + log_level = logging.DEBUG + if self._is_available is not False: + log_level = logging.WARNING + _LOGGER.log( + log_level, "Error connecting to '%s': %s", self.server, error + ) + self._is_available = False + self._status = {} + # Also yield on failure. Handling mpd.ConnectionErrors caused by + # attempting to control a disconnected client is the + # responsibility of the caller. 
+ yield + finally: + with suppress(mpd.ConnectionError): + self._client.disconnect() async def async_update(self) -> None: - """Get the latest data and update the state.""" - try: - if not self._is_connected: - await self._connect() - self._commands = list(await self._client.commands()) + """Get the latest data from MPD and update the state.""" + async with self.connection(): + try: + self._status = await self._client.status() + self._currentsong = await self._client.currentsong() + await self._async_update_media_image_hash() - await self._fetch_status() - except (mpd.ConnectionError, OSError, ValueError) as error: - # Cleanly disconnect in case connection is not in valid state - _LOGGER.debug("Error updating status: %s", error) - self._disconnect() + if (position := self._status.get("elapsed")) is None: + position = self._status.get("time") + + if isinstance(position, str) and ":" in position: + position = position.split(":")[0] + + if position is not None and self._media_position != position: + self._media_position_updated_at = dt_util.utcnow() + self._media_position = int(float(position)) + + await self._update_playlists() + except (mpd.ConnectionError, ValueError) as error: + _LOGGER.debug("Error updating status: %s", error) + + @property + def available(self) -> bool: + """Return true if MPD is available and connected.""" + return self._is_available is True @property def name(self): @@ -174,13 +197,13 @@ class MpdDevice(MediaPlayerEntity): @property def state(self) -> MediaPlayerState: """Return the media state.""" - if self._status is None: + if not self._status: return MediaPlayerState.OFF - if self._status["state"] == "play": + if self._status.get("state") == "play": return MediaPlayerState.PLAYING - if self._status["state"] == "pause": + if self._status.get("state") == "pause": return MediaPlayerState.PAUSED - if self._status["state"] == "stop": + if self._status.get("state") == "stop": return MediaPlayerState.OFF return MediaPlayerState.OFF @@ -259,20 
+282,26 @@ class MpdDevice(MediaPlayerEntity): async def async_get_media_image(self) -> tuple[bytes | None, str | None]: """Fetch media image of current playing track.""" - if not (file := self._currentsong.get("file")): - return None, None - response = await self._async_get_file_image_response(file) - if response is None: - return None, None + async with self.connection(): + if self._currentsong is None or not (file := self._currentsong.get("file")): + return None, None - image = bytes(response["binary"]) - mime = response.get( - "type", "image/png" - ) # readpicture has type, albumart does not - return (image, mime) + with suppress(mpd.ConnectionError): + response = await self._async_get_file_image_response(file) + if response is None: + return None, None + + image = bytes(response["binary"]) + mime = response.get( + "type", "image/png" + ) # readpicture has type, albumart does not + return (image, mime) async def _async_update_media_image_hash(self): """Update the hash value for the media image.""" + if self._currentsong is None: + return + file = self._currentsong.get("file") if file == self._media_image_file: @@ -295,16 +324,21 @@ class MpdDevice(MediaPlayerEntity): self._media_image_file = file async def _async_get_file_image_response(self, file): - # not all MPD implementations and versions support the `albumart` and `fetchpicture` commands - can_albumart = "albumart" in self._commands - can_readpicture = "readpicture" in self._commands + # not all MPD implementations and versions support the `albumart` and + # `fetchpicture` commands. 
+ commands = [] + with suppress(mpd.ConnectionError): + commands = list(await self._client.commands()) + can_albumart = "albumart" in commands + can_readpicture = "readpicture" in commands response = None # read artwork embedded into the media file if can_readpicture: try: - response = await self._client.readpicture(file) + with suppress(mpd.ConnectionError): + response = await self._client.readpicture(file) except mpd.CommandError as error: if error.errno is not mpd.FailureResponseCode.NO_EXIST: _LOGGER.warning( @@ -315,7 +349,8 @@ class MpdDevice(MediaPlayerEntity): # read artwork contained in the media directory (cover.{jpg,png,tiff,bmp}) if none is embedded if can_albumart and not response: try: - response = await self._client.albumart(file) + with suppress(mpd.ConnectionError): + response = await self._client.albumart(file) except mpd.CommandError as error: if error.errno is not mpd.FailureResponseCode.NO_EXIST: _LOGGER.warning( @@ -339,7 +374,7 @@ class MpdDevice(MediaPlayerEntity): @property def supported_features(self) -> MediaPlayerEntityFeature: """Flag media player features that are supported.""" - if self._status is None: + if not self._status: return MediaPlayerEntityFeature(0) supported = SUPPORT_MPD @@ -373,55 +408,64 @@ class MpdDevice(MediaPlayerEntity): """Update available MPD playlists.""" try: self._playlists = [] - for playlist_data in await self._client.listplaylists(): - self._playlists.append(playlist_data["playlist"]) + with suppress(mpd.ConnectionError): + for playlist_data in await self._client.listplaylists(): + self._playlists.append(playlist_data["playlist"]) except mpd.CommandError as error: self._playlists = None _LOGGER.warning("Playlists could not be updated: %s:", error) async def async_set_volume_level(self, volume: float) -> None: """Set volume of media player.""" - if "volume" in self._status: - await self._client.setvol(int(volume * 100)) + async with self.connection(): + if "volume" in self._status: + await 
self._client.setvol(int(volume * 100)) async def async_volume_up(self) -> None: """Service to send the MPD the command for volume up.""" - if "volume" in self._status: - current_volume = int(self._status["volume"]) + async with self.connection(): + if "volume" in self._status: + current_volume = int(self._status["volume"]) - if current_volume <= 100: - self._client.setvol(current_volume + 5) + if current_volume <= 100: + self._client.setvol(current_volume + 5) async def async_volume_down(self) -> None: """Service to send the MPD the command for volume down.""" - if "volume" in self._status: - current_volume = int(self._status["volume"]) + async with self.connection(): + if "volume" in self._status: + current_volume = int(self._status["volume"]) - if current_volume >= 0: - await self._client.setvol(current_volume - 5) + if current_volume >= 0: + await self._client.setvol(current_volume - 5) async def async_media_play(self) -> None: """Service to send the MPD the command for play/pause.""" - if self._status["state"] == "pause": - await self._client.pause(0) - else: - await self._client.play() + async with self.connection(): + if self._status.get("state") == "pause": + await self._client.pause(0) + else: + await self._client.play() async def async_media_pause(self) -> None: """Service to send the MPD the command for play/pause.""" - await self._client.pause(1) + async with self.connection(): + await self._client.pause(1) async def async_media_stop(self) -> None: """Service to send the MPD the command for stop.""" - await self._client.stop() + async with self.connection(): + await self._client.stop() async def async_media_next_track(self) -> None: """Service to send the MPD the command for next track.""" - await self._client.next() + async with self.connection(): + await self._client.next() async def async_media_previous_track(self) -> None: """Service to send the MPD the command for previous track.""" - await self._client.previous() + async with self.connection(): + 
await self._client.previous() async def async_mute_volume(self, mute: bool) -> None: """Mute. Emulated with set_volume_level.""" @@ -437,75 +481,82 @@ class MpdDevice(MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Send the media player the command for playing a playlist.""" - if media_source.is_media_source_id(media_id): - media_type = MediaType.MUSIC - play_item = await media_source.async_resolve_media( - self.hass, media_id, self.entity_id - ) - media_id = async_process_play_media_url(self.hass, play_item.url) + async with self.connection(): + if media_source.is_media_source_id(media_id): + media_type = MediaType.MUSIC + play_item = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = async_process_play_media_url(self.hass, play_item.url) - if media_type == MediaType.PLAYLIST: - _LOGGER.debug("Playing playlist: %s", media_id) - if media_id in self._playlists: - self._currentplaylist = media_id + if media_type == MediaType.PLAYLIST: + _LOGGER.debug("Playing playlist: %s", media_id) + if media_id in self._playlists: + self._currentplaylist = media_id + else: + self._currentplaylist = None + _LOGGER.warning("Unknown playlist name %s", media_id) + await self._client.clear() + await self._client.load(media_id) + await self._client.play() else: + await self._client.clear() self._currentplaylist = None - _LOGGER.warning("Unknown playlist name %s", media_id) - await self._client.clear() - await self._client.load(media_id) - await self._client.play() - else: - await self._client.clear() - self._currentplaylist = None - await self._client.add(media_id) - await self._client.play() + await self._client.add(media_id) + await self._client.play() @property def repeat(self) -> RepeatMode: """Return current repeat mode.""" - if self._status["repeat"] == "1": - if self._status["single"] == "1": + if self._status.get("repeat") == "1": + if self._status.get("single") == "1": return 
RepeatMode.ONE return RepeatMode.ALL return RepeatMode.OFF async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set repeat mode.""" - if repeat == RepeatMode.OFF: - await self._client.repeat(0) - await self._client.single(0) - else: - await self._client.repeat(1) - if repeat == RepeatMode.ONE: - await self._client.single(1) - else: + async with self.connection(): + if repeat == RepeatMode.OFF: + await self._client.repeat(0) await self._client.single(0) + else: + await self._client.repeat(1) + if repeat == RepeatMode.ONE: + await self._client.single(1) + else: + await self._client.single(0) @property def shuffle(self): """Boolean if shuffle is enabled.""" - return bool(int(self._status["random"])) + return bool(int(self._status.get("random"))) async def async_set_shuffle(self, shuffle: bool) -> None: """Enable/disable shuffle mode.""" - await self._client.random(int(shuffle)) + async with self.connection(): + await self._client.random(int(shuffle)) async def async_turn_off(self) -> None: """Service to send the MPD the command to stop playing.""" - await self._client.stop() + async with self.connection(): + await self._client.stop() async def async_turn_on(self) -> None: """Service to send the MPD the command to start playing.""" - await self._client.play() - await self._update_playlists(no_throttle=True) + async with self.connection(): + await self._client.play() + await self._update_playlists(no_throttle=True) async def async_clear_playlist(self) -> None: """Clear players playlist.""" - await self._client.clear() + async with self.connection(): + await self._client.clear() async def async_media_seek(self, position: float) -> None: """Send seek command.""" - await self._client.seekcur(position) + async with self.connection(): + await self._client.seekcur(position) async def async_browse_media( self, @@ -513,8 +564,11 @@ class MpdDevice(MediaPlayerEntity): media_content_id: str | None = None, ) -> BrowseMedia: """Implement the websocket media browsing 
helper.""" - return await media_source.async_browse_media( - self.hass, - media_content_id, - content_filter=lambda item: item.media_content_type.startswith("audio/"), - ) + async with self.connection(): + return await media_source.async_browse_media( + self.hass, + media_content_id, + content_filter=lambda item: item.media_content_type.startswith( + "audio/" + ), + ) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index effff9fdf12..16f584db011 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -25,7 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import HassJob, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ( - HomeAssistantError, + ConfigValidationError, ServiceValidationError, TemplateError, Unauthorized, @@ -245,11 +245,11 @@ async def async_check_config_schema( for config in config_items: try: schema(config) - except vol.Invalid as ex: + except vol.Invalid as exc: integration = await async_get_integration(hass, DOMAIN) # pylint: disable-next=protected-access - message, _ = conf_util._format_config_error( - ex, domain, config, integration.documentation + message = conf_util.format_schema_error( + hass, exc, domain, config, integration.documentation ) raise ServiceValidationError( message, @@ -258,7 +258,7 @@ async def async_check_config_schema( translation_placeholders={ "domain": domain, }, - ) from ex + ) from exc async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -417,14 +417,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _reload_config(call: ServiceCall) -> None: """Reload the platforms.""" # Fetch updated manually configured items and validate - if ( - config_yaml := await async_integration_yaml_config(hass, DOMAIN) - ) is None: - # Raise in case we have an invalid configuration - raise HomeAssistantError( - "Error reloading 
manually configured MQTT items, " - "check your configuration.yaml" + try: + config_yaml = await async_integration_yaml_config( + hass, DOMAIN, raise_on_failure=True ) + except ConfigValidationError as ex: + raise ServiceValidationError( + str(ex), + translation_domain=ex.translation_domain, + translation_key=ex.translation_key, + translation_placeholders=ex.translation_placeholders, + ) from ex + # Check the schema before continuing reload await async_check_config_schema(hass, config_yaml) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 2e4d49b4cd9..c87d4c9244a 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -124,7 +124,10 @@ async def async_publish( """Publish message to a MQTT topic.""" if not mqtt_config_entry_enabled(hass): raise HomeAssistantError( - f"Cannot publish to topic '{topic}', MQTT is not enabled" + f"Cannot publish to topic '{topic}', MQTT is not enabled", + translation_key="mqtt_not_setup_cannot_publish", + translation_domain=DOMAIN, + translation_placeholders={"topic": topic}, ) mqtt_data = get_mqtt_data(hass) outgoing_payload = payload @@ -174,15 +177,21 @@ async def async_subscribe( """ if not mqtt_config_entry_enabled(hass): raise HomeAssistantError( - f"Cannot subscribe to topic '{topic}', MQTT is not enabled" + f"Cannot subscribe to topic '{topic}', MQTT is not enabled", + translation_key="mqtt_not_setup_cannot_subscribe", + translation_domain=DOMAIN, + translation_placeholders={"topic": topic}, ) try: mqtt_data = get_mqtt_data(hass) - except KeyError as ex: + except KeyError as exc: raise HomeAssistantError( f"Cannot subscribe to topic '{topic}', " - "make sure MQTT is set up correctly" - ) from ex + "make sure MQTT is set up correctly", + translation_key="mqtt_not_setup_cannot_subscribe", + translation_domain=DOMAIN, + translation_placeholders={"topic": topic}, + ) from exc async_remove = await mqtt_data.client.async_subscribe( topic, 
catch_log_exception( @@ -606,8 +615,8 @@ class MQTT: del simple_subscriptions[topic] else: self._wildcard_subscriptions.remove(subscription) - except (KeyError, ValueError) as ex: - raise HomeAssistantError("Can't remove subscription twice") from ex + except (KeyError, ValueError) as exc: + raise HomeAssistantError("Can't remove subscription twice") from exc @callback def _async_queue_subscriptions( diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 358fa6eb675..c8696071fb4 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -256,7 +256,7 @@ def valid_humidity_state_configuration(config: ConfigType) -> ConfigType: CONF_HUMIDITY_STATE_TOPIC in config and CONF_HUMIDITY_COMMAND_TOPIC not in config ): - raise ValueError( + raise vol.Invalid( f"{CONF_HUMIDITY_STATE_TOPIC} cannot be used without" f" {CONF_HUMIDITY_COMMAND_TOPIC}" ) @@ -470,9 +470,10 @@ class MqttTemperatureControlEntity(MqttEntity, ABC): except ValueError: _LOGGER.error("Could not parse %s from %s", template_name, payload) - def prepare_subscribe_topics( - self, topics: dict[str, dict[str, Any]] - ) -> None: # noqa: C901 + def prepare_subscribe_topics( # noqa: C901 + self, + topics: dict[str, dict[str, Any]], + ) -> None: """(Re)Subscribe to topics.""" @callback diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index c8da14e67e6..4e8cf0f4129 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -380,7 +380,11 @@ class MqttCover(MqttEntity, CoverEntity): else STATE_OPEN ) else: - state = STATE_CLOSED if self.state == STATE_CLOSING else STATE_OPEN + state = ( + STATE_CLOSED + if self.state in [STATE_CLOSED, STATE_CLOSING] + else STATE_OPEN + ) elif payload == self._config[CONF_STATE_OPENING]: state = STATE_OPENING elif payload == self._config[CONF_STATE_CLOSING]: diff --git a/homeassistant/components/mqtt/fan.py 
b/homeassistant/components/mqtt/fan.py index 0e9e7d708e9..e3dcf66c8b1 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -553,8 +553,6 @@ class MqttFan(MqttEntity, FanEntity): This method is a coroutine. """ - self._valid_preset_mode_or_raise(preset_mode) - mqtt_payload = self._command_templates[ATTR_PRESET_MODE](preset_mode) await self.async_publish( diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 6f70ff34051..3d2957f153d 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -367,10 +367,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): if brightness_supported(self.supported_color_modes): try: if brightness := values["brightness"]: - self._attr_brightness = int( - brightness # type: ignore[operator] - / float(self._config[CONF_BRIGHTNESS_SCALE]) - * 255 + scale = self._config[CONF_BRIGHTNESS_SCALE] + self._attr_brightness = min( + 255, + round(brightness * 255 / scale), # type: ignore[operator] ) else: _LOGGER.debug( diff --git a/homeassistant/components/mqtt/mixins.py b/homeassistant/components/mqtt/mixins.py index d84f430bd85..76300afb97a 100644 --- a/homeassistant/components/mqtt/mixins.py +++ b/homeassistant/components/mqtt/mixins.py @@ -457,8 +457,8 @@ async def async_setup_entity_entry_helper( if TYPE_CHECKING: assert entity_class is not None entities.append(entity_class(hass, config, entry, None)) - except vol.Invalid as ex: - error = str(ex) + except vol.Invalid as exc: + error = str(exc) config_file = getattr(yaml_config, "__config_file__", "?") line = getattr(yaml_config, "__line__", "?") issue_id = hex(hash(frozenset(yaml_config))) diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index 2da2527ad7b..63b8d537170 100644 --- a/homeassistant/components/mqtt/models.py +++ 
b/homeassistant/components/mqtt/models.py @@ -247,15 +247,15 @@ class MqttValueTemplate: payload, variables=values ) ) - except Exception as ex: + except Exception as exc: _LOGGER.error( "%s: %s rendering template for entity '%s', template: '%s'", - type(ex).__name__, - ex, + type(exc).__name__, + exc, self._entity.entity_id if self._entity else "n/a", self._value_template.template, ) - raise ex + raise exc return rendered_payload _LOGGER.debug( diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 7f8dcfedd9a..f35cd7c2b58 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -214,7 +214,13 @@ }, "exceptions": { "invalid_platform_config": { - "message": "Reloading YAML config for manually configured MQTT `{domain}` failed. See logs for more details." + "message": "Reloading YAML config for manually configured MQTT `{domain}` item failed. See logs for more details." + }, + "mqtt_not_setup_cannot_subscribe": { + "message": "Cannot subscribe to topic '{topic}', make sure MQTT is set up correctly." + }, + "mqtt_not_setup_cannot_publish": { + "message": "Cannot publish to topic '{topic}', make sure MQTT is set up correctly." 
} } } diff --git a/homeassistant/components/mqtt/util.py b/homeassistant/components/mqtt/util.py index 6e364182cb0..f478ad712d7 100644 --- a/homeassistant/components/mqtt/util.py +++ b/homeassistant/components/mqtt/util.py @@ -63,9 +63,8 @@ async def async_wait_for_mqtt_client(hass: HomeAssistant) -> bool: state_reached_future: asyncio.Future[bool] if DATA_MQTT_AVAILABLE not in hass.data: - hass.data[ - DATA_MQTT_AVAILABLE - ] = state_reached_future = hass.loop.create_future() + state_reached_future = hass.loop.create_future() + hass.data[DATA_MQTT_AVAILABLE] = state_reached_future else: state_reached_future = hass.data[DATA_MQTT_AVAILABLE] if state_reached_future.done(): diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index 8d4396d5d80..a4ef9af9aee 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], "quality_scale": "platinum", - "requirements": ["nettigo-air-monitor==2.2.1"], + "requirements": ["nettigo-air-monitor==2.2.2"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/nam/strings.json b/homeassistant/components/nam/strings.json index e443a398984..83a40d87f76 100644 --- a/homeassistant/components/nam/strings.json +++ b/homeassistant/components/nam/strings.json @@ -6,6 +6,9 @@ "description": "Set up Nettigo Air Monitor integration.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Nettigo Air Monitor to control." 
} }, "credentials": { diff --git a/homeassistant/components/netatmo/__init__.py b/homeassistant/components/netatmo/__init__.py index ddd2fc61ed7..4535805915b 100644 --- a/homeassistant/components/netatmo/__init__.py +++ b/homeassistant/components/netatmo/__init__.py @@ -8,7 +8,6 @@ from typing import Any import aiohttp import pyatmo -from pyatmo.const import ALL_SCOPES as NETATMO_SCOPES import voluptuous as vol from homeassistant.components import cloud @@ -143,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: await session.async_ensure_token_valid() except aiohttp.ClientResponseError as ex: - _LOGGER.debug("API error: %s (%s)", ex.status, ex.message) + _LOGGER.warning("API error: %s (%s)", ex.status, ex.message) if ex.status in ( HTTPStatus.BAD_REQUEST, HTTPStatus.UNAUTHORIZED, @@ -152,19 +151,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from ex raise ConfigEntryNotReady from ex - if entry.data["auth_implementation"] == cloud.DOMAIN: - required_scopes = { - scope - for scope in NETATMO_SCOPES - if scope not in ("access_doorbell", "read_doorbell") - } - else: - required_scopes = set(NETATMO_SCOPES) - - if not (set(session.token["scope"]) & required_scopes): - _LOGGER.debug( + required_scopes = api.get_api_scopes(entry.data["auth_implementation"]) + if not (set(session.token["scope"]) & set(required_scopes)): + _LOGGER.warning( "Session is missing scopes: %s", - required_scopes - set(session.token["scope"]), + set(required_scopes) - set(session.token["scope"]), ) raise ConfigEntryAuthFailed("Token scope not valid, trigger renewal") diff --git a/homeassistant/components/netatmo/api.py b/homeassistant/components/netatmo/api.py index 0b36745338e..7605689b3f5 100644 --- a/homeassistant/components/netatmo/api.py +++ b/homeassistant/components/netatmo/api.py @@ -1,11 +1,29 @@ """API for Netatmo bound to HASS OAuth.""" +from 
collections.abc import Iterable from typing import cast from aiohttp import ClientSession import pyatmo +from homeassistant.components import cloud from homeassistant.helpers import config_entry_oauth2_flow +from .const import API_SCOPES_EXCLUDED_FROM_CLOUD + + +def get_api_scopes(auth_implementation: str) -> Iterable[str]: + """Return the Netatmo API scopes based on the auth implementation.""" + + if auth_implementation == cloud.DOMAIN: + return set( + { + scope + for scope in pyatmo.const.ALL_SCOPES + if scope not in API_SCOPES_EXCLUDED_FROM_CLOUD + } + ) + return sorted(pyatmo.const.ALL_SCOPES) + class AsyncConfigEntryNetatmoAuth(pyatmo.AbstractAsyncAuth): """Provide Netatmo authentication tied to an OAuth2 based config entry.""" diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index a14cadf45c4..5a05818d3f2 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -39,6 +39,8 @@ from .const import ( ATTR_HEATING_POWER_REQUEST, ATTR_SCHEDULE_NAME, ATTR_SELECTED_SCHEDULE, + ATTR_TARGET_TEMPERATURE, + ATTR_TIME_PERIOD, CONF_URL_ENERGY, DATA_SCHEDULES, DOMAIN, @@ -47,8 +49,11 @@ from .const import ( EVENT_TYPE_SET_POINT, EVENT_TYPE_THERM_MODE, NETATMO_CREATE_CLIMATE, + SERVICE_CLEAR_TEMPERATURE_SETTING, SERVICE_SET_PRESET_MODE_WITH_END_DATETIME, SERVICE_SET_SCHEDULE, + SERVICE_SET_TEMPERATURE_WITH_END_DATETIME, + SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD, ) from .data_handler import HOME, SIGNAL_NAME, NetatmoRoom from .netatmo_entity_base import NetatmoBase @@ -143,6 +148,34 @@ async def async_setup_entry( }, "_async_service_set_preset_mode_with_end_datetime", ) + platform.async_register_entity_service( + SERVICE_SET_TEMPERATURE_WITH_END_DATETIME, + { + vol.Required(ATTR_TARGET_TEMPERATURE): vol.All( + vol.Coerce(float), vol.Range(min=7, max=30) + ), + vol.Required(ATTR_END_DATETIME): cv.datetime, + }, + "_async_service_set_temperature_with_end_datetime", + ) + 
platform.async_register_entity_service( + SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD, + { + vol.Required(ATTR_TARGET_TEMPERATURE): vol.All( + vol.Coerce(float), vol.Range(min=7, max=30) + ), + vol.Required(ATTR_TIME_PERIOD): vol.All( + cv.time_period, + cv.positive_timedelta, + ), + }, + "_async_service_set_temperature_with_time_period", + ) + platform.async_register_entity_service( + SERVICE_CLEAR_TEMPERATURE_SETTING, + {}, + "_async_service_clear_temperature_setting", + ) class NetatmoThermostat(NetatmoBase, ClimateEntity): @@ -441,12 +474,48 @@ class NetatmoThermostat(NetatmoBase, ClimateEntity): mode=PRESET_MAP_NETATMO[preset_mode], end_time=end_timestamp ) _LOGGER.debug( - "Setting %s preset to %s with optional end datetime to %s", + "Setting %s preset to %s with end datetime %s", self._room.home.entity_id, preset_mode, end_timestamp, ) + async def _async_service_set_temperature_with_end_datetime( + self, **kwargs: Any + ) -> None: + target_temperature = kwargs[ATTR_TARGET_TEMPERATURE] + end_datetime = kwargs[ATTR_END_DATETIME] + end_timestamp = int(dt_util.as_timestamp(end_datetime)) + + _LOGGER.debug( + "Setting %s to target temperature %s with end datetime %s", + self._room.entity_id, + target_temperature, + end_timestamp, + ) + await self._room.async_therm_manual(target_temperature, end_timestamp) + + async def _async_service_set_temperature_with_time_period( + self, **kwargs: Any + ) -> None: + target_temperature = kwargs[ATTR_TARGET_TEMPERATURE] + time_period = kwargs[ATTR_TIME_PERIOD] + + _LOGGER.debug( + "Setting %s to target temperature %s with time period %s", + self._room.entity_id, + target_temperature, + time_period, + ) + + now_timestamp = dt_util.as_timestamp(dt_util.utcnow()) + end_timestamp = int(now_timestamp + time_period.seconds) + await self._room.async_therm_manual(target_temperature, end_timestamp) + + async def _async_service_clear_temperature_setting(self, **kwargs: Any) -> None: + _LOGGER.debug("Clearing %s temperature setting", 
self._room.entity_id) + await self._room.async_therm_home() + @property def device_info(self) -> DeviceInfo: """Return the device info for the thermostat.""" diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index b4e6d838537..bae81a7762f 100644 --- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -6,7 +6,6 @@ import logging from typing import Any import uuid -from pyatmo.const import ALL_SCOPES import voluptuous as vol from homeassistant import config_entries @@ -15,6 +14,7 @@ from homeassistant.core import callback from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv +from .api import get_api_scopes from .const import ( CONF_AREA_NAME, CONF_LAT_NE, @@ -53,13 +53,7 @@ class NetatmoFlowHandler( @property def extra_authorize_data(self) -> dict: """Extra data that needs to be appended to the authorize url.""" - exclude = [] - if self.flow_impl.name == "Home Assistant Cloud": - exclude = ["access_doorbell", "read_doorbell"] - - scopes = [scope for scope in ALL_SCOPES if scope not in exclude] - scopes.sort() - + scopes = get_api_scopes(self.flow_impl.domain) return {"scope": " ".join(scopes)} async def async_step_user(self, user_input: dict | None = None) -> FlowResult: diff --git a/homeassistant/components/netatmo/const.py b/homeassistant/components/netatmo/const.py index 9e7ac33c8b6..3fe456dd657 100644 --- a/homeassistant/components/netatmo/const.py +++ b/homeassistant/components/netatmo/const.py @@ -30,6 +30,13 @@ HOME_DATA = "netatmo_home_data" DATA_HANDLER = "netatmo_data_handler" SIGNAL_NAME = "signal_name" +API_SCOPES_EXCLUDED_FROM_CLOUD = [ + "access_doorbell", + "read_doorbell", + "read_mhs1", + "write_mhs1", +] + NETATMO_CREATE_BATTERY = "netatmo_create_battery" NETATMO_CREATE_CAMERA = "netatmo_create_camera" NETATMO_CREATE_CAMERA_LIGHT = 
"netatmo_create_camera_light" @@ -82,12 +89,17 @@ ATTR_PSEUDO = "pseudo" ATTR_SCHEDULE_ID = "schedule_id" ATTR_SCHEDULE_NAME = "schedule_name" ATTR_SELECTED_SCHEDULE = "selected_schedule" +ATTR_TARGET_TEMPERATURE = "target_temperature" +ATTR_TIME_PERIOD = "time_period" +SERVICE_CLEAR_TEMPERATURE_SETTING = "clear_temperature_setting" SERVICE_SET_CAMERA_LIGHT = "set_camera_light" SERVICE_SET_PERSON_AWAY = "set_person_away" SERVICE_SET_PERSONS_HOME = "set_persons_home" SERVICE_SET_SCHEDULE = "set_schedule" SERVICE_SET_PRESET_MODE_WITH_END_DATETIME = "set_preset_mode_with_end_datetime" +SERVICE_SET_TEMPERATURE_WITH_END_DATETIME = "set_temperature_with_end_datetime" +SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD = "set_temperature_with_time_period" # Climate events EVENT_TYPE_CANCEL_SET_POINT = "cancel_set_point" diff --git a/homeassistant/components/netatmo/manifest.json b/homeassistant/components/netatmo/manifest.json index d031632ed75..7d84641874a 100644 --- a/homeassistant/components/netatmo/manifest.json +++ b/homeassistant/components/netatmo/manifest.json @@ -7,7 +7,7 @@ "dependencies": ["application_credentials", "webhook"], "documentation": "https://www.home-assistant.io/integrations/netatmo", "homekit": { - "models": ["Healty Home Coach", "Netatmo Relay", "Presence", "Welcome"] + "models": ["Healthy Home Coach", "Netatmo Relay", "Presence", "Welcome"] }, "integration_type": "hub", "iot_class": "cloud_polling", diff --git a/homeassistant/components/netatmo/services.yaml b/homeassistant/components/netatmo/services.yaml index 228f84f175d..cab0528199d 100644 --- a/homeassistant/components/netatmo/services.yaml +++ b/homeassistant/components/netatmo/services.yaml @@ -46,6 +46,56 @@ set_preset_mode_with_end_datetime: selector: datetime: +set_temperature_with_end_datetime: + target: + entity: + integration: netatmo + domain: climate + fields: + target_temperature: + required: true + example: "19.5" + selector: + number: + min: 7 + max: 30 + step: 0.5 + end_datetime: + 
required: true + example: '"2019-04-20 05:04:20"' + selector: + datetime: + +set_temperature_with_time_period: + target: + entity: + integration: netatmo + domain: climate + fields: + target_temperature: + required: true + example: "19.5" + selector: + number: + min: 7 + max: 30 + step: 0.5 + time_period: + required: true + default: + hours: 3 + minutes: 0 + seconds: 0 + days: 0 + selector: + duration: + +clear_temperature_setting: + target: + entity: + integration: netatmo + domain: climate + set_persons_home: target: entity: diff --git a/homeassistant/components/netatmo/strings.json b/homeassistant/components/netatmo/strings.json index bdb51808852..e504b27b599 100644 --- a/homeassistant/components/netatmo/strings.json +++ b/homeassistant/components/netatmo/strings.json @@ -121,7 +121,7 @@ "description": "Unregisters the webhook from the Netatmo backend." }, "set_preset_mode_with_end_datetime": { - "name": "Set preset mode with end datetime", + "name": "Set preset mode with end date & time", "description": "Sets the preset mode for a Netatmo climate device. The preset mode must match a preset mode configured at Netatmo.", "fields": { "preset_mode": { @@ -129,10 +129,42 @@ "description": "Climate preset mode such as Schedule, Away or Frost Guard." }, "end_datetime": { - "name": "End datetime", - "description": "Datetime for until when the preset will be active." + "name": "End date & time", + "description": "Date & time the preset will be active until." } } + }, + "set_temperature_with_end_datetime": { + "name": "Set temperature with end date & time", + "description": "Sets the target temperature for a Netatmo climate device with an end date & time.", + "fields": { + "target_temperature": { + "name": "Target temperature", + "description": "The target temperature for the device." 
+ }, + "end_datetime": { + "name": "[%key:component::netatmo::services::set_preset_mode_with_end_datetime::fields::end_datetime::name%]", + "description": "Date & time the target temperature will be active until." + } + } + }, + "set_temperature_with_time_period": { + "name": "Set temperature with time period", + "description": "Sets the target temperature for a Netatmo climate device with time period.", + "fields": { + "target_temperature": { + "name": "[%key:component::netatmo::services::set_temperature_with_end_datetime::fields::target_temperature::name%]", + "description": "[%key:component::netatmo::services::set_temperature_with_end_datetime::fields::target_temperature::description%]" + }, + "time_period": { + "name": "Time period", + "description": "The time period which the temperature setting will be active for." + } + } + }, + "clear_temperature_setting": { + "name": "Clear temperature setting", + "description": "Clears any temperature setting for a Netatmo climate device reverting it to the current preset or schedule." 
} } } diff --git a/homeassistant/components/nextcloud/sensor.py b/homeassistant/components/nextcloud/sensor.py index 16c8adb77ce..6800c403ee8 100644 --- a/homeassistant/components/nextcloud/sensor.py +++ b/homeassistant/components/nextcloud/sensor.py @@ -10,6 +10,7 @@ from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -33,33 +34,37 @@ UNIT_OF_LOAD: Final[str] = "load" class NextcloudSensorEntityDescription(SensorEntityDescription): """Describes Nextcloud sensor entity.""" - value_fn: Callable[ - [str | int | float], str | int | float | datetime - ] = lambda value: value + value_fn: Callable[[str | int | float], str | int | float | datetime] = ( + lambda value: value + ) SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="activeUsers_last1hour", translation_key="nextcloud_activeusers_last1hour", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, icon="mdi:account-multiple", ), NextcloudSensorEntityDescription( key="activeUsers_last24hours", translation_key="nextcloud_activeusers_last24hours", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, icon="mdi:account-multiple", ), NextcloudSensorEntityDescription( key="activeUsers_last5minutes", translation_key="nextcloud_activeusers_last5minutes", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, icon="mdi:account-multiple", ), NextcloudSensorEntityDescription( key="cache_expunges", translation_key="nextcloud_cache_expunges", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -81,30 +86,35 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="cache_num_entries", 
translation_key="nextcloud_cache_num_entries", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="cache_num_hits", translation_key="nextcloud_cache_num_hits", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="cache_num_inserts", translation_key="nextcloud_cache_num_inserts", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="cache_num_misses", translation_key="nextcloud_cache_num_misses", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="cache_num_slots", translation_key="nextcloud_cache_num_slots", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -166,6 +176,7 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="interned_strings_usage_number_of_strings", translation_key="nextcloud_interned_strings_usage_number_of_strings", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -220,6 +231,7 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="opcache_statistics_blacklist_miss_ratio", translation_key="nextcloud_opcache_statistics_blacklist_miss_ratio", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, native_unit_of_measurement=PERCENTAGE, @@ -227,18 +239,21 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( 
key="opcache_statistics_blacklist_misses", translation_key="nextcloud_opcache_statistics_blacklist_misses", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_hash_restarts", translation_key="nextcloud_opcache_statistics_hash_restarts", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_hits", translation_key="nextcloud_opcache_statistics_hits", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -253,36 +268,42 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="opcache_statistics_manual_restarts", translation_key="nextcloud_opcache_statistics_manual_restarts", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_max_cached_keys", translation_key="nextcloud_opcache_statistics_max_cached_keys", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_misses", translation_key="nextcloud_opcache_statistics_misses", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_num_cached_keys", translation_key="nextcloud_opcache_statistics_num_cached_keys", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_num_cached_scripts", 
translation_key="nextcloud_opcache_statistics_num_cached_scripts", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), NextcloudSensorEntityDescription( key="opcache_statistics_oom_restarts", translation_key="nextcloud_opcache_statistics_oom_restarts", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -386,45 +407,54 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="shares_num_fed_shares_sent", translation_key="nextcloud_shares_num_fed_shares_sent", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_fed_shares_received", translation_key="nextcloud_shares_num_fed_shares_received", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_shares", translation_key="nextcloud_shares_num_shares", + state_class=SensorStateClass.MEASUREMENT, ), NextcloudSensorEntityDescription( key="shares_num_shares_groups", translation_key="nextcloud_shares_num_shares_groups", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_shares_link", translation_key="nextcloud_shares_num_shares_link", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_shares_link_no_password", translation_key="nextcloud_shares_num_shares_link_no_password", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_shares_mail", translation_key="nextcloud_shares_num_shares_mail", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), 
NextcloudSensorEntityDescription( key="shares_num_shares_room", translation_key="nextcloud_shares_num_shares_room", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="shares_num_shares_user", translation_key="nextcloud_shares_num_shares_user", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( @@ -440,6 +470,7 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="sma_num_seg", translation_key="nextcloud_sma_num_seg", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -456,37 +487,45 @@ SENSORS: Final[list[NextcloudSensorEntityDescription]] = [ NextcloudSensorEntityDescription( key="storage_num_files", translation_key="nextcloud_storage_num_files", + state_class=SensorStateClass.MEASUREMENT, ), NextcloudSensorEntityDescription( key="storage_num_storages", translation_key="nextcloud_storage_num_storages", + state_class=SensorStateClass.MEASUREMENT, ), NextcloudSensorEntityDescription( key="storage_num_storages_home", translation_key="nextcloud_storage_num_storages_home", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="storage_num_storages_local", translation_key="nextcloud_storage_num_storages_local", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="storage_num_storages_other", translation_key="nextcloud_storage_num_storages_other", + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, ), NextcloudSensorEntityDescription( key="storage_num_users", translation_key="nextcloud_storage_num_users", + state_class=SensorStateClass.MEASUREMENT, ), NextcloudSensorEntityDescription( 
key="system_apps_num_installed", translation_key="nextcloud_system_apps_num_installed", + state_class=SensorStateClass.MEASUREMENT, ), NextcloudSensorEntityDescription( key="system_apps_num_updates_available", translation_key="nextcloud_system_apps_num_updates_available", + state_class=SensorStateClass.MEASUREMENT, icon="mdi:update", ), NextcloudSensorEntityDescription( diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index 725ce1b9201..611021d73e4 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["nextdns"], "quality_scale": "platinum", - "requirements": ["nextdns==2.0.1"] + "requirements": ["nextdns==2.1.0"] } diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index 73c4dc51089..94a2a76c814 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.5.0"] + "requirements": ["nibe==2.5.2"] } diff --git a/homeassistant/components/nobo_hub/__init__.py b/homeassistant/components/nobo_hub/__init__.py index bc2c328d647..6c77f98d1b1 100644 --- a/homeassistant/components/nobo_hub/__init__.py +++ b/homeassistant/components/nobo_hub/__init__.py @@ -4,26 +4,12 @@ from __future__ import annotations from pynobo import nobo from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_NAME, - CONF_IP_ADDRESS, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_IP_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from .const import ( - 
ATTR_HARDWARE_VERSION, - ATTR_SERIAL, - ATTR_SOFTWARE_VERSION, - CONF_AUTO_DISCOVERED, - CONF_SERIAL, - DOMAIN, - NOBO_MANUFACTURER, -) +from .const import CONF_AUTO_DISCOVERED, CONF_SERIAL, DOMAIN -PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [Platform.CLIMATE, Platform.SELECT, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -37,17 +23,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.setdefault(DOMAIN, {}) - # Register hub as device - dev_reg = dr.async_get(hass) - dev_reg.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, hub.hub_info[ATTR_SERIAL])}, - manufacturer=NOBO_MANUFACTURER, - name=hub.hub_info[ATTR_NAME], - model=f"Nobø Ecohub ({hub.hub_info[ATTR_HARDWARE_VERSION]})", - sw_version=hub.hub_info[ATTR_SOFTWARE_VERSION], - ) - async def _async_close(event): """Close the Nobø Ecohub socket connection when HA stops.""" await hub.stop() diff --git a/homeassistant/components/nobo_hub/manifest.json b/homeassistant/components/nobo_hub/manifest.json index 4e6009ce6d7..9ddbed7dadc 100644 --- a/homeassistant/components/nobo_hub/manifest.json +++ b/homeassistant/components/nobo_hub/manifest.json @@ -4,6 +4,7 @@ "codeowners": ["@echoromeo", "@oyvindwe"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nobo_hub", + "integration_type": "hub", "iot_class": "local_push", "requirements": ["pynobo==1.6.0"] } diff --git a/homeassistant/components/nobo_hub/select.py b/homeassistant/components/nobo_hub/select.py new file mode 100644 index 00000000000..b386e158420 --- /dev/null +++ b/homeassistant/components/nobo_hub/select.py @@ -0,0 +1,170 @@ +"""Python Control of Nobø Hub - Nobø Energy Control.""" +from __future__ import annotations + +from pynobo import nobo + +from homeassistant.components.select import SelectEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import 
ATTR_NAME +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ( + ATTR_HARDWARE_VERSION, + ATTR_SERIAL, + ATTR_SOFTWARE_VERSION, + CONF_OVERRIDE_TYPE, + DOMAIN, + NOBO_MANUFACTURER, + OVERRIDE_TYPE_NOW, +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up any temperature sensors connected to the Nobø Ecohub.""" + + # Setup connection with hub + hub: nobo = hass.data[DOMAIN][config_entry.entry_id] + + override_type = ( + nobo.API.OVERRIDE_TYPE_NOW + if config_entry.options.get(CONF_OVERRIDE_TYPE) == OVERRIDE_TYPE_NOW + else nobo.API.OVERRIDE_TYPE_CONSTANT + ) + + entities: list[SelectEntity] = [ + NoboProfileSelector(zone_id, hub) for zone_id in hub.zones + ] + entities.append(NoboGlobalSelector(hub, override_type)) + async_add_entities(entities, True) + + +class NoboGlobalSelector(SelectEntity): + """Global override selector for Nobø Ecohub.""" + + _attr_has_entity_name = True + _attr_translation_key = "global_override" + _attr_device_class = "nobo_hub__override" + _attr_should_poll = False + _modes = { + nobo.API.OVERRIDE_MODE_NORMAL: "none", + nobo.API.OVERRIDE_MODE_AWAY: "away", + nobo.API.OVERRIDE_MODE_COMFORT: "comfort", + nobo.API.OVERRIDE_MODE_ECO: "eco", + } + _attr_options = list(_modes.values()) + _attr_current_option: str + + def __init__(self, hub: nobo, override_type) -> None: + """Initialize the global override selector.""" + self._nobo = hub + self._attr_unique_id = hub.hub_serial + self._override_type = override_type + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, hub.hub_serial)}, + name=hub.hub_info[ATTR_NAME], + manufacturer=NOBO_MANUFACTURER, + model=f"Nobø Ecohub ({hub.hub_info[ATTR_HARDWARE_VERSION]})", + 
sw_version=hub.hub_info[ATTR_SOFTWARE_VERSION], + ) + + async def async_added_to_hass(self) -> None: + """Register callback from hub.""" + self._nobo.register_callback(self._after_update) + + async def async_will_remove_from_hass(self) -> None: + """Deregister callback from hub.""" + self._nobo.deregister_callback(self._after_update) + + async def async_select_option(self, option: str) -> None: + """Set override.""" + mode = [k for k, v in self._modes.items() if v == option][0] + try: + await self._nobo.async_create_override( + mode, self._override_type, nobo.API.OVERRIDE_TARGET_GLOBAL + ) + except Exception as exp: + raise HomeAssistantError from exp + + async def async_update(self) -> None: + """Fetch new state data for this zone.""" + self._read_state() + + @callback + def _read_state(self) -> None: + for override in self._nobo.overrides.values(): + if override["target_type"] == nobo.API.OVERRIDE_TARGET_GLOBAL: + self._attr_current_option = self._modes[override["mode"]] + break + + @callback + def _after_update(self, hub) -> None: + self._read_state() + self.async_write_ha_state() + + +class NoboProfileSelector(SelectEntity): + """Week profile selector for Nobø zones.""" + + _attr_translation_key = "week_profile" + _attr_has_entity_name = True + _attr_should_poll = False + _profiles: dict[int, str] = {} + _attr_options: list[str] = [] + _attr_current_option: str + + def __init__(self, zone_id: str, hub: nobo) -> None: + """Initialize the week profile selector.""" + self._id = zone_id + self._nobo = hub + self._attr_unique_id = f"{hub.hub_serial}:{zone_id}:profile" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{hub.hub_serial}:{zone_id}")}, + name=hub.zones[zone_id][ATTR_NAME], + via_device=(DOMAIN, hub.hub_info[ATTR_SERIAL]), + suggested_area=hub.zones[zone_id][ATTR_NAME], + ) + + async def async_added_to_hass(self) -> None: + """Register callback from hub.""" + self._nobo.register_callback(self._after_update) + + async def 
async_will_remove_from_hass(self) -> None: + """Deregister callback from hub.""" + self._nobo.deregister_callback(self._after_update) + + async def async_select_option(self, option: str) -> None: + """Set week profile.""" + week_profile_id = [k for k, v in self._profiles.items() if v == option][0] + try: + await self._nobo.async_update_zone( + self._id, week_profile_id=week_profile_id + ) + except Exception as exp: + raise HomeAssistantError from exp + + async def async_update(self) -> None: + """Fetch new state data for this zone.""" + self._read_state() + + @callback + def _read_state(self) -> None: + self._profiles = { + profile["week_profile_id"]: profile["name"].replace("\xa0", " ") + for profile in self._nobo.week_profiles.values() + } + self._attr_options = sorted(self._profiles.values()) + self._attr_current_option = self._profiles[ + self._nobo.zones[self._id]["week_profile_id"] + ] + + @callback + def _after_update(self, hub) -> None: + self._read_state() + self.async_write_ha_state() diff --git a/homeassistant/components/nobo_hub/strings.json b/homeassistant/components/nobo_hub/strings.json index cfa339c98df..28be01862e9 100644 --- a/homeassistant/components/nobo_hub/strings.json +++ b/homeassistant/components/nobo_hub/strings.json @@ -40,5 +40,21 @@ "description": "Select override type \"Now\" to end override on next week profile change." 
} } + }, + "entity": { + "select": { + "global_override": { + "name": "global override", + "state": { + "away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]", + "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", + "eco": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]", + "none": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::none%]" + } + }, + "week_profile": { + "name": "week profile" + } + } } } diff --git a/homeassistant/components/nuki/__init__.py b/homeassistant/components/nuki/__init__.py index ede7a20ccdb..3f17c0b795b 100644 --- a/homeassistant/components/nuki/__init__.py +++ b/homeassistant/components/nuki/__init__.py @@ -39,7 +39,7 @@ from homeassistant.helpers.update_coordinator import ( UpdateFailed, ) -from .const import DEFAULT_TIMEOUT, DOMAIN, ERROR_STATES +from .const import CONF_ENCRYPT_TOKEN, DEFAULT_TIMEOUT, DOMAIN, ERROR_STATES from .helpers import NukiWebhookException, parse_id _NukiDeviceT = TypeVar("_NukiDeviceT", bound=NukiDevice) @@ -188,7 +188,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.data[CONF_HOST], entry.data[CONF_TOKEN], entry.data[CONF_PORT], - True, + entry.data.get(CONF_ENCRYPT_TOKEN, True), DEFAULT_TIMEOUT, ) diff --git a/homeassistant/components/nuki/config_flow.py b/homeassistant/components/nuki/config_flow.py index 310197d55d8..4acfecf492b 100644 --- a/homeassistant/components/nuki/config_flow.py +++ b/homeassistant/components/nuki/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components import dhcp from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN from homeassistant.data_entry_flow import FlowResult -from .const import DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN +from .const import CONF_ENCRYPT_TOKEN, DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN from .helpers import CannotConnect, 
InvalidAuth, parse_id _LOGGER = logging.getLogger(__name__) @@ -26,7 +26,12 @@ USER_SCHEMA = vol.Schema( } ) -REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_TOKEN): str}) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_TOKEN): str, + vol.Optional(CONF_ENCRYPT_TOKEN, default=True): bool, + } +) async def validate_input(hass, data): @@ -41,7 +46,7 @@ async def validate_input(hass, data): data[CONF_HOST], data[CONF_TOKEN], data[CONF_PORT], - True, + data.get(CONF_ENCRYPT_TOKEN, True), DEFAULT_TIMEOUT, ) @@ -100,6 +105,7 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): CONF_HOST: self._data[CONF_HOST], CONF_PORT: self._data[CONF_PORT], CONF_TOKEN: user_input[CONF_TOKEN], + CONF_ENCRYPT_TOKEN: user_input[CONF_ENCRYPT_TOKEN], } try: @@ -131,8 +137,15 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): async def async_step_validate(self, user_input=None): """Handle init step of a flow.""" + data_schema = self.discovery_schema or USER_SCHEMA + errors = {} if user_input is not None: + data_schema = USER_SCHEMA.extend( + { + vol.Optional(CONF_ENCRYPT_TOKEN, default=True): bool, + } + ) try: info = await validate_input(self.hass, user_input) except CannotConnect: @@ -149,7 +162,8 @@ class NukiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return self.async_create_entry(title=bridge_id, data=user_input) - data_schema = self.discovery_schema or USER_SCHEMA return self.async_show_form( - step_id="user", data_schema=data_schema, errors=errors + step_id="user", + data_schema=self.add_suggested_values_to_schema(data_schema, user_input), + errors=errors, ) diff --git a/homeassistant/components/nuki/const.py b/homeassistant/components/nuki/const.py index dee4a8b8ac5..21a2dcf9e5b 100644 --- a/homeassistant/components/nuki/const.py +++ b/homeassistant/components/nuki/const.py @@ -12,3 +12,6 @@ DEFAULT_PORT = 8080 DEFAULT_TIMEOUT = 20 ERROR_STATES = (0, 254, 255) + +# Encrypt token, instead of using a 
plaintext token +CONF_ENCRYPT_TOKEN = "encrypt_token" diff --git a/homeassistant/components/nuki/strings.json b/homeassistant/components/nuki/strings.json index 19aeae989f4..eb380cabd04 100644 --- a/homeassistant/components/nuki/strings.json +++ b/homeassistant/components/nuki/strings.json @@ -5,14 +5,16 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", - "token": "[%key:common::config_flow::data::access_token%]" + "token": "[%key:common::config_flow::data::access_token%]", + "encrypt_token": "Use an encrypted token for authentication." } }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", "description": "The Nuki integration needs to re-authenticate with your bridge.", "data": { - "token": "[%key:common::config_flow::data::access_token%]" + "token": "[%key:common::config_flow::data::access_token%]", + "encrypt_token": "[%key:component::nuki::config::step::user::data::encrypt_token%]" } } }, diff --git a/homeassistant/components/nws/manifest.json b/homeassistant/components/nws/manifest.json index 05194d85a26..4006a145db4 100644 --- a/homeassistant/components/nws/manifest.json +++ b/homeassistant/components/nws/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["metar", "pynws"], "quality_scale": "platinum", - "requirements": ["pynws==1.5.1"] + "requirements": ["pynws==1.6.0"] } diff --git a/homeassistant/components/octoprint/camera.py b/homeassistant/components/octoprint/camera.py index 99052993a61..a6955706508 100644 --- a/homeassistant/components/octoprint/camera.py +++ b/homeassistant/components/octoprint/camera.py @@ -7,8 +7,8 @@ from homeassistant.components.mjpeg.camera import MjpegCamera from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_VERIFY_SSL from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import 
AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import OctoprintDataUpdateCoordinator from .const import DOMAIN @@ -38,7 +38,7 @@ async def async_setup_entry( [ OctoprintCamera( camera_info, - coordinator.device_info, + coordinator, device_id, verify_ssl, ) @@ -46,19 +46,23 @@ async def async_setup_entry( ) -class OctoprintCamera(MjpegCamera): +class OctoprintCamera(CoordinatorEntity[OctoprintDataUpdateCoordinator], MjpegCamera): """Representation of an OctoPrint Camera Stream.""" def __init__( self, camera_settings: WebcamSettings, - device_info: DeviceInfo, + coordinator: OctoprintDataUpdateCoordinator, device_id: str, verify_ssl: bool, ) -> None: """Initialize as a subclass of MjpegCamera.""" super().__init__( - device_info=device_info, + coordinator=coordinator, + ) + MjpegCamera.__init__( + self, + device_info=coordinator.device_info, mjpeg_url=camera_settings.stream_url, name="OctoPrint Camera", still_image_url=camera_settings.external_snapshot_url, diff --git a/homeassistant/components/onvif/base.py b/homeassistant/components/onvif/base.py index 8771ae7a701..5f8a7d978d1 100644 --- a/homeassistant/components/onvif/base.py +++ b/homeassistant/components/onvif/base.py @@ -32,8 +32,7 @@ class ONVIFBaseEntity(Entity): See: https://github.com/home-assistant/core/issues/35883 """ return ( - self.device.info.mac - or self.device.info.serial_number # type:ignore[return-value] + self.device.info.mac or self.device.info.serial_number # type:ignore[return-value] ) @property diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 9f4c30d91ba..054ccbdbe37 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -89,7 +89,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: await hass.async_add_executor_job( partial( - 
openai.Engine.list, + openai.Model.list, api_key=entry.data[CONF_API_KEY], request_timeout=10, ) @@ -141,7 +141,7 @@ class OpenAIAgent(conversation.AbstractConversationAgent): conversation_id = user_input.conversation_id messages = self.history[conversation_id] else: - conversation_id = ulid.ulid() + conversation_id = ulid.ulid_now() try: prompt = self._async_generate_prompt(raw_prompt) except TemplateError as err: diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index b391f531eb1..9c5ef32d796 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -60,7 +60,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ openai.api_key = data[CONF_API_KEY] - await hass.async_add_executor_job(partial(openai.Engine.list, request_timeout=10)) + await hass.async_add_executor_job(partial(openai.Model.list, request_timeout=10)) class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): diff --git a/homeassistant/components/openexchangerates/config_flow.py b/homeassistant/components/openexchangerates/config_flow.py index a61264dbf41..b78227ed1e5 100644 --- a/homeassistant/components/openexchangerates/config_flow.py +++ b/homeassistant/components/openexchangerates/config_flow.py @@ -66,7 +66,11 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self._reauth_entry.data if self._reauth_entry else {} ) return self.async_show_form( - step_id="user", data_schema=get_data_schema(currencies, existing_data) + step_id="user", + data_schema=get_data_schema(currencies, existing_data), + description_placeholders={ + "signup": "https://openexchangerates.org/signup" + }, ) errors = {} diff --git a/homeassistant/components/openexchangerates/sensor.py b/homeassistant/components/openexchangerates/sensor.py 
index 70f2f670de8..66baf54c16a 100644 --- a/homeassistant/components/openexchangerates/sensor.py +++ b/homeassistant/components/openexchangerates/sensor.py @@ -64,4 +64,4 @@ class OpenexchangeratesSensor( @property def native_value(self) -> float: """Return the state of the sensor.""" - return round(self.coordinator.data.rates[self._quote], 4) + return self.coordinator.data.rates[self._quote] diff --git a/homeassistant/components/ourgroceries/__init__.py b/homeassistant/components/ourgroceries/__init__.py new file mode 100644 index 00000000000..d645b8617c2 --- /dev/null +++ b/homeassistant/components/ourgroceries/__init__.py @@ -0,0 +1,50 @@ +"""The OurGroceries integration.""" +from __future__ import annotations + +from asyncio import TimeoutError as AsyncIOTimeoutError + +from aiohttp import ClientError +from ourgroceries import OurGroceries +from ourgroceries.exceptions import InvalidLoginException + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +from .const import DOMAIN +from .coordinator import OurGroceriesDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.TODO] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up OurGroceries from a config entry.""" + + hass.data.setdefault(DOMAIN, {}) + data = entry.data + og = OurGroceries(data[CONF_USERNAME], data[CONF_PASSWORD]) + lists = [] + try: + await og.login() + lists = (await og.get_my_lists())["shoppingLists"] + except (AsyncIOTimeoutError, ClientError) as error: + raise ConfigEntryNotReady from error + except InvalidLoginException: + return False + + coordinator = OurGroceriesDataUpdateCoordinator(hass, og, lists) + await coordinator.async_config_entry_first_refresh() + hass.data[DOMAIN][entry.entry_id] = coordinator + + await 
hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/ourgroceries/config_flow.py b/homeassistant/components/ourgroceries/config_flow.py new file mode 100644 index 00000000000..a982325fceb --- /dev/null +++ b/homeassistant/components/ourgroceries/config_flow.py @@ -0,0 +1,57 @@ +"""Config flow for OurGroceries integration.""" +from __future__ import annotations + +from asyncio import TimeoutError as AsyncIOTimeoutError +import logging +from typing import Any + +from aiohttp import ClientError +from ourgroceries import OurGroceries +from ourgroceries.exceptions import InvalidLoginException +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.data_entry_flow import FlowResult + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for OurGroceries.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + og = OurGroceries(user_input[CONF_USERNAME], user_input[CONF_PASSWORD]) + try: + await og.login() + except (AsyncIOTimeoutError, ClientError): + errors["base"] = "cannot_connect" + except InvalidLoginException: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: 
+ return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/ourgroceries/const.py b/homeassistant/components/ourgroceries/const.py new file mode 100644 index 00000000000..ba0ff789522 --- /dev/null +++ b/homeassistant/components/ourgroceries/const.py @@ -0,0 +1,3 @@ +"""Constants for the OurGroceries integration.""" + +DOMAIN = "ourgroceries" diff --git a/homeassistant/components/ourgroceries/coordinator.py b/homeassistant/components/ourgroceries/coordinator.py new file mode 100644 index 00000000000..636ebcc300a --- /dev/null +++ b/homeassistant/components/ourgroceries/coordinator.py @@ -0,0 +1,47 @@ +"""The OurGroceries coordinator.""" +from __future__ import annotations + +import asyncio +from datetime import timedelta +import logging + +from ourgroceries import OurGroceries + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN + +SCAN_INTERVAL = 60 + +_LOGGER = logging.getLogger(__name__) + + +class OurGroceriesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): + """Class to manage fetching OurGroceries data.""" + + def __init__( + self, hass: HomeAssistant, og: OurGroceries, lists: list[dict] + ) -> None: + """Initialize global OurGroceries data updater.""" + self.og = og + self.lists = lists + self._ids = [sl["id"] for sl in lists] + interval = timedelta(seconds=SCAN_INTERVAL) + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=interval, + ) + + async def _async_update_data(self) -> dict[str, dict]: + """Fetch data from OurGroceries.""" + return dict( + zip( + self._ids, + await asyncio.gather( + *[self.og.get_list_items(list_id=id) for id in self._ids] + ), + ) + ) diff --git a/homeassistant/components/ourgroceries/manifest.json 
b/homeassistant/components/ourgroceries/manifest.json new file mode 100644 index 00000000000..ec5a5039b39 --- /dev/null +++ b/homeassistant/components/ourgroceries/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "ourgroceries", + "name": "OurGroceries", + "codeowners": ["@OnFreund"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ourgroceries", + "iot_class": "cloud_polling", + "requirements": ["ourgroceries==1.5.4"] +} diff --git a/homeassistant/components/komfovent/strings.json b/homeassistant/components/ourgroceries/strings.json similarity index 66% rename from homeassistant/components/komfovent/strings.json rename to homeassistant/components/ourgroceries/strings.json index 074754c1fe0..78a46954183 100644 --- a/homeassistant/components/komfovent/strings.json +++ b/homeassistant/components/ourgroceries/strings.json @@ -3,7 +3,6 @@ "step": { "user": { "data": { - "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } @@ -12,11 +11,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "invalid_input": "Failed to parse provided hostname", "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } } } diff --git a/homeassistant/components/ourgroceries/todo.py b/homeassistant/components/ourgroceries/todo.py new file mode 100644 index 00000000000..8115066d0fb --- /dev/null +++ b/homeassistant/components/ourgroceries/todo.py @@ -0,0 +1,119 @@ +"""A todo platform for OurGroceries.""" + +import asyncio +from typing import Any + +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from 
homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OurGroceriesDataUpdateCoordinator + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the OurGroceries todo platform config entry.""" + coordinator: OurGroceriesDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + OurGroceriesTodoListEntity(coordinator, sl["id"], sl["name"]) + for sl in coordinator.lists + ) + + +def _completion_status(item: dict[str, Any]) -> TodoItemStatus: + if item.get("crossedOffAt", False): + return TodoItemStatus.COMPLETED + return TodoItemStatus.NEEDS_ACTION + + +class OurGroceriesTodoListEntity( + CoordinatorEntity[OurGroceriesDataUpdateCoordinator], TodoListEntity +): + """An OurGroceries TodoListEntity.""" + + _attr_has_entity_name = True + _attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + ) + + def __init__( + self, + coordinator: OurGroceriesDataUpdateCoordinator, + list_id: str, + list_name: str, + ) -> None: + """Initialize TodoistTodoListEntity.""" + super().__init__(coordinator=coordinator) + self._list_id = list_id + self._attr_unique_id = list_id + self._attr_name = list_name + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if self.coordinator.data is None: + self._attr_todo_items = None + else: + self._attr_todo_items = [ + TodoItem( + summary=item["name"], + uid=item["id"], + status=_completion_status(item), + ) + for item in self.coordinator.data[self._list_id]["list"]["items"] + ] + super()._handle_coordinator_update() + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Create 
a To-do item.""" + if item.status != TodoItemStatus.NEEDS_ACTION: + raise ValueError("Only active tasks may be created.") + await self.coordinator.og.add_item_to_list( + self._list_id, item.summary, auto_category=True + ) + await self.coordinator.async_refresh() + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update a To-do item.""" + if item.summary: + api_items = self.coordinator.data[self._list_id]["list"]["items"] + category = next( + api_item["categoryId"] + for api_item in api_items + if api_item["id"] == item.uid + ) + await self.coordinator.og.change_item_on_list( + self._list_id, item.uid, category, item.summary + ) + if item.status is not None: + cross_off = item.status == TodoItemStatus.COMPLETED + await self.coordinator.og.toggle_item_crossed_off( + self._list_id, item.uid, cross_off=cross_off + ) + await self.coordinator.async_refresh() + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete a To-do item.""" + await asyncio.gather( + *[ + self.coordinator.og.remove_item_from_list(self._list_id, uid) + for uid in uids + ] + ) + await self.coordinator.async_refresh() + + async def async_added_to_hass(self) -> None: + """When entity is added to hass update state from existing coordinator data.""" + await super().async_added_to_hass() + self._handle_coordinator_update() diff --git a/homeassistant/components/overkiz/__init__.py b/homeassistant/components/overkiz/__init__.py index 36713d972b1..ebc3f96a7f5 100644 --- a/homeassistant/components/overkiz/__init__.py +++ b/homeassistant/components/overkiz/__init__.py @@ -9,23 +9,32 @@ from typing import cast from aiohttp import ClientError from pyoverkiz.client import OverkizClient from pyoverkiz.const import SUPPORTED_SERVERS -from pyoverkiz.enums import OverkizState, UIClass, UIWidget +from pyoverkiz.enums import APIType, OverkizState, UIClass, UIWidget from pyoverkiz.exceptions import ( BadCredentialsException, MaintenanceException, NotSuchTokenException, 
TooManyRequestsException, ) -from pyoverkiz.models import Device, Scenario, Setup +from pyoverkiz.models import Device, OverkizServer, Scenario, Setup +from pyoverkiz.utils import generate_local_server from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, + CONF_VERIFY_SSL, + Platform, +) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import ( + CONF_API_TYPE, CONF_HUB, DOMAIN, LOGGER, @@ -48,15 +57,26 @@ class HomeAssistantOverkizData: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Overkiz from a config entry.""" - username = entry.data[CONF_USERNAME] - password = entry.data[CONF_PASSWORD] - server = SUPPORTED_SERVERS[entry.data[CONF_HUB]] + client: OverkizClient | None = None + api_type = entry.data.get(CONF_API_TYPE, APIType.CLOUD) - # To allow users with multiple accounts/hubs, we create a new session so they have separate cookies - session = async_create_clientsession(hass) - client = OverkizClient( - username=username, password=password, session=session, server=server - ) + # Local API + if api_type == APIType.LOCAL: + client = create_local_client( + hass, + host=entry.data[CONF_HOST], + token=entry.data[CONF_TOKEN], + verify_ssl=entry.data[CONF_VERIFY_SSL], + ) + + # Overkiz Cloud API + else: + client = create_cloud_client( + hass, + username=entry.data[CONF_USERNAME], + password=entry.data[CONF_PASSWORD], + server=SUPPORTED_SERVERS[entry.data[CONF_HUB]], + ) await _async_migrate_entries(hass, entry) @@ -211,3 +231,31 @@ async def _async_migrate_entries( await er.async_migrate_entries(hass, 
config_entry.entry_id, update_unique_id) return True + + +def create_local_client( + hass: HomeAssistant, host: str, token: str, verify_ssl: bool +) -> OverkizClient: + """Create Overkiz local client.""" + session = async_create_clientsession(hass, verify_ssl=verify_ssl) + + return OverkizClient( + username="", + password="", + token=token, + session=session, + server=generate_local_server(host=host), + verify_ssl=verify_ssl, + ) + + +def create_cloud_client( + hass: HomeAssistant, username: str, password: str, server: OverkizServer +) -> OverkizClient: + """Create Overkiz cloud client.""" + # To allow users with multiple accounts/hubs, we create a new session so they have separate cookies + session = async_create_clientsession(hass) + + return OverkizClient( + username=username, password=password, session=session, server=server + ) diff --git a/homeassistant/components/overkiz/climate.py b/homeassistant/components/overkiz/climate.py index a94c731ec8f..b6d31a8e685 100644 --- a/homeassistant/components/overkiz/climate.py +++ b/homeassistant/components/overkiz/climate.py @@ -7,7 +7,10 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import HomeAssistantOverkizData -from .climate_entities import WIDGET_TO_CLIMATE_ENTITY +from .climate_entities import ( + WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY, + WIDGET_TO_CLIMATE_ENTITY, +) from .const import DOMAIN @@ -24,3 +27,13 @@ async def async_setup_entry( for device in data.platforms[Platform.CLIMATE] if device.widget in WIDGET_TO_CLIMATE_ENTITY ) + + # Hitachi Air To Air Heat Pumps + async_add_entities( + WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget][device.protocol]( + device.device_url, data.coordinator + ) + for device in data.platforms[Platform.CLIMATE] + if device.widget in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY + and device.protocol in WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY[device.widget] + ) diff --git a/homeassistant/components/overkiz/climate_entities/__init__.py b/homeassistant/components/overkiz/climate_entities/__init__.py index b6345dd9b95..c74ff2829cc 100644 --- a/homeassistant/components/overkiz/climate_entities/__init__.py +++ b/homeassistant/components/overkiz/climate_entities/__init__.py @@ -1,4 +1,5 @@ """Climate entities for the Overkiz (by Somfy) integration.""" +from pyoverkiz.enums import Protocol from pyoverkiz.enums.ui import UIWidget from .atlantic_electrical_heater import AtlanticElectricalHeater @@ -9,6 +10,7 @@ from .atlantic_electrical_towel_dryer import AtlanticElectricalTowelDryer from .atlantic_heat_recovery_ventilation import AtlanticHeatRecoveryVentilation from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl +from .hitachi_air_to_air_heat_pump_hlrrwifi import HitachiAirToAirHeatPumpHLRRWIFI from .somfy_heating_temperature_interface import SomfyHeatingTemperatureInterface from .somfy_thermostat import SomfyThermostat from .valve_heating_temperature_interface import ValveHeatingTemperatureInterface @@ -26,3 +28,10 @@ WIDGET_TO_CLIMATE_ENTITY = { UIWidget.SOMFY_THERMOSTAT: SomfyThermostat, 
UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface, } + +# Hitachi air-to-air heatpumps come in 2 flavors (HLRRWIFI and OVP) that are separated in 2 classes +WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY = { + UIWidget.HITACHI_AIR_TO_AIR_HEAT_PUMP: { + Protocol.HLRR_WIFI: HitachiAirToAirHeatPumpHLRRWIFI, + }, +} diff --git a/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py new file mode 100644 index 00000000000..7a9e50d7130 --- /dev/null +++ b/homeassistant/components/overkiz/climate_entities/hitachi_air_to_air_heat_pump_hlrrwifi.py @@ -0,0 +1,280 @@ +"""Support for HitachiAirToAirHeatPump.""" +from __future__ import annotations + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.climate import ( + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + PRESET_NONE, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..const import DOMAIN +from ..coordinator import OverkizDataUpdateCoordinator +from ..entity import OverkizEntity + +PRESET_HOLIDAY_MODE = "holiday_mode" +FAN_SILENT = "silent" +FAN_SPEED_STATE = OverkizState.HLRRWIFI_FAN_SPEED +LEAVE_HOME_STATE = OverkizState.HLRRWIFI_LEAVE_HOME +MAIN_OPERATION_STATE = OverkizState.HLRRWIFI_MAIN_OPERATION +MODE_CHANGE_STATE = OverkizState.HLRRWIFI_MODE_CHANGE +ROOM_TEMPERATURE_STATE = OverkizState.HLRRWIFI_ROOM_TEMPERATURE +SWING_STATE = OverkizState.HLRRWIFI_SWING + +OVERKIZ_TO_HVAC_MODES: dict[str, HVACMode] = { + OverkizCommandParam.AUTOHEATING: HVACMode.AUTO, + OverkizCommandParam.AUTOCOOLING: HVACMode.AUTO, + OverkizCommandParam.ON: HVACMode.HEAT, + OverkizCommandParam.OFF: HVACMode.OFF, + OverkizCommandParam.HEATING: HVACMode.HEAT, 
+ OverkizCommandParam.FAN: HVACMode.FAN_ONLY, + OverkizCommandParam.DEHUMIDIFY: HVACMode.DRY, + OverkizCommandParam.COOLING: HVACMode.COOL, + OverkizCommandParam.AUTO: HVACMode.AUTO, +} + +HVAC_MODES_TO_OVERKIZ: dict[HVACMode, str] = { + HVACMode.AUTO: OverkizCommandParam.AUTO, + HVACMode.HEAT: OverkizCommandParam.HEATING, + HVACMode.OFF: OverkizCommandParam.AUTO, + HVACMode.FAN_ONLY: OverkizCommandParam.FAN, + HVACMode.DRY: OverkizCommandParam.DEHUMIDIFY, + HVACMode.COOL: OverkizCommandParam.COOLING, +} + +OVERKIZ_TO_SWING_MODES: dict[str, str] = { + OverkizCommandParam.BOTH: SWING_BOTH, + OverkizCommandParam.HORIZONTAL: SWING_HORIZONTAL, + OverkizCommandParam.STOP: SWING_OFF, + OverkizCommandParam.VERTICAL: SWING_VERTICAL, +} + +SWING_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_SWING_MODES.items()} + +OVERKIZ_TO_FAN_MODES: dict[str, str] = { + OverkizCommandParam.AUTO: FAN_AUTO, + OverkizCommandParam.HIGH: FAN_HIGH, + OverkizCommandParam.LOW: FAN_LOW, + OverkizCommandParam.MEDIUM: FAN_MEDIUM, + OverkizCommandParam.SILENT: FAN_SILENT, +} + +FAN_MODES_TO_OVERKIZ: dict[str, str] = { + FAN_AUTO: OverkizCommandParam.AUTO, + FAN_HIGH: OverkizCommandParam.HIGH, + FAN_LOW: OverkizCommandParam.LOW, + FAN_MEDIUM: OverkizCommandParam.MEDIUM, + FAN_SILENT: OverkizCommandParam.SILENT, +} + + +class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity): + """Representation of Hitachi Air To Air HeatPump.""" + + _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] + _attr_preset_modes = [PRESET_NONE, PRESET_HOLIDAY_MODE] + _attr_swing_modes = [*SWING_MODES_TO_OVERKIZ] + _attr_target_temperature_step = 1.0 + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = DOMAIN + + def __init__( + self, device_url: str, coordinator: OverkizDataUpdateCoordinator + ) -> None: + """Init method.""" + super().__init__(device_url, coordinator) + + self._attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.FAN_MODE + | 
ClimateEntityFeature.PRESET_MODE + ) + + if self.device.states.get(SWING_STATE): + self._attr_supported_features |= ClimateEntityFeature.SWING_MODE + + if self._attr_device_info: + self._attr_device_info["manufacturer"] = "Hitachi" + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. heat, cool mode.""" + if ( + main_op_state := self.device.states[MAIN_OPERATION_STATE] + ) and main_op_state.value_as_str: + if main_op_state.value_as_str.lower() == OverkizCommandParam.OFF: + return HVACMode.OFF + + if ( + mode_change_state := self.device.states[MODE_CHANGE_STATE] + ) and mode_change_state.value_as_str: + sanitized_value = mode_change_state.value_as_str.lower() + return OVERKIZ_TO_HVAC_MODES[sanitized_value] + + return HVACMode.OFF + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + if hvac_mode == HVACMode.OFF: + await self._global_control(main_operation=OverkizCommandParam.OFF) + else: + await self._global_control( + main_operation=OverkizCommandParam.ON, + hvac_mode=HVAC_MODES_TO_OVERKIZ[hvac_mode], + ) + + @property + def fan_mode(self) -> str | None: + """Return the fan setting.""" + if (state := self.device.states[FAN_SPEED_STATE]) and state.value_as_str: + return OVERKIZ_TO_FAN_MODES[state.value_as_str] + + return None + + @property + def fan_modes(self) -> list[str] | None: + """Return the list of available fan modes.""" + return [*FAN_MODES_TO_OVERKIZ] + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set new target fan mode.""" + await self._global_control(fan_mode=FAN_MODES_TO_OVERKIZ[fan_mode]) + + @property + def swing_mode(self) -> str | None: + """Return the swing setting.""" + if (state := self.device.states[SWING_STATE]) and state.value_as_str: + return OVERKIZ_TO_SWING_MODES[state.value_as_str] + + return None + + async def async_set_swing_mode(self, swing_mode: str) -> None: + """Set new target swing operation.""" + await 
self._global_control(swing_mode=SWING_MODES_TO_OVERKIZ[swing_mode]) + + @property + def target_temperature(self) -> int | None: + """Return the temperature.""" + if ( + temperature := self.device.states[OverkizState.CORE_TARGET_TEMPERATURE] + ) and temperature.value_as_int: + return temperature.value_as_int + + return None + + @property + def current_temperature(self) -> int | None: + """Return current temperature.""" + if (state := self.device.states[ROOM_TEMPERATURE_STATE]) and state.value_as_int: + return state.value_as_int + + return None + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + temperature = cast(float, kwargs.get(ATTR_TEMPERATURE)) + await self._global_control(target_temperature=int(temperature)) + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode, e.g., home, away, temp.""" + if (state := self.device.states[LEAVE_HOME_STATE]) and state.value_as_str: + if state.value_as_str == OverkizCommandParam.ON: + return PRESET_HOLIDAY_MODE + + if state.value_as_str == OverkizCommandParam.OFF: + return PRESET_NONE + + return None + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + if preset_mode == PRESET_HOLIDAY_MODE: + await self._global_control(leave_home=OverkizCommandParam.ON) + + if preset_mode == PRESET_NONE: + await self._global_control(leave_home=OverkizCommandParam.OFF) + + def _control_backfill( + self, value: str | None, state_name: str, fallback_value: str + ) -> str: + """Overkiz doesn't accept commands with undefined parameters. 
This function is guaranteed to return a `str` which is the provided `value` if set, or the current device state if set, or the provided `fallback_value` otherwise.""" + if value: + return value + state = self.device.states[state_name] + if state and state.value_as_str: + return state.value_as_str + return fallback_value + + async def _global_control( + self, + main_operation: str | None = None, + target_temperature: int | None = None, + fan_mode: str | None = None, + hvac_mode: str | None = None, + swing_mode: str | None = None, + leave_home: str | None = None, + ) -> None: + """Execute globalControl command with all parameters. There is no option to only set a single parameter, without passing all other values.""" + + main_operation = self._control_backfill( + main_operation, MAIN_OPERATION_STATE, OverkizCommandParam.ON + ) + target_temperature = target_temperature or self.target_temperature + + fan_mode = self._control_backfill( + fan_mode, + FAN_SPEED_STATE, + OverkizCommandParam.AUTO, + ) + hvac_mode = self._control_backfill( + hvac_mode, + MODE_CHANGE_STATE, + OverkizCommandParam.AUTO, + ).lower() # Overkiz can return states that have uppercase characters which are not accepted back as commands + if ( + hvac_mode.replace(" ", "") + in [ # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto' + OverkizCommandParam.AUTOCOOLING, + OverkizCommandParam.AUTOHEATING, + ] + ): + hvac_mode = OverkizCommandParam.AUTO + + swing_mode = self._control_backfill( + swing_mode, + SWING_STATE, + OverkizCommandParam.STOP, + ) + + leave_home = self._control_backfill( + leave_home, + LEAVE_HOME_STATE, + OverkizCommandParam.OFF, + ) + + command_data = [ + main_operation, # Main Operation + target_temperature, # Target Temperature + fan_mode, # Fan Mode + hvac_mode, # Mode + swing_mode, # Swing Mode + leave_home, # Leave Home + ] + + await self.executor.async_execute_command( + OverkizCommand.GLOBAL_CONTROL, 
*command_data + ) diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index eac749f1bc0..4f3f50bf0e8 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -1,31 +1,46 @@ -"""Config flow for Overkiz (by Somfy) integration.""" +"""Config flow for Overkiz integration.""" from __future__ import annotations from collections.abc import Mapping from typing import Any, cast -from aiohttp import ClientError +from aiohttp import ClientConnectorCertificateError, ClientError from pyoverkiz.client import OverkizClient -from pyoverkiz.const import SUPPORTED_SERVERS +from pyoverkiz.const import SERVERS_WITH_LOCAL_API, SUPPORTED_SERVERS +from pyoverkiz.enums import APIType, Server from pyoverkiz.exceptions import ( BadCredentialsException, CozyTouchBadCredentialsException, MaintenanceException, + NotSuchTokenException, TooManyAttemptsBannedException, TooManyRequestsException, UnknownUserException, ) -from pyoverkiz.models import obfuscate_id +from pyoverkiz.models import OverkizServer +from pyoverkiz.obfuscate import obfuscate_id +from pyoverkiz.utils import generate_local_server, is_overkiz_gateway import voluptuous as vol from homeassistant import config_entries from homeassistant.components import dhcp, zeroconf from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, + CONF_VERIFY_SSL, +) from homeassistant.data_entry_flow import FlowResult +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_create_clientsession -from .const import CONF_HUB, DEFAULT_HUB, DOMAIN, LOGGER +from .const import CONF_API_TYPE, CONF_HUB, DEFAULT_SERVER, DOMAIN, LOGGER + + +class DeveloperModeDisabled(HomeAssistantError): + """Error to indicate Somfy Developer Mode is 
disabled.""" class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): @@ -33,46 +48,103 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 - _config_entry: ConfigEntry | None - _default_user: None | str - _default_hub: str + _reauth_entry: ConfigEntry | None = None + _api_type: APIType = APIType.CLOUD + _user: str | None = None + _server: str = DEFAULT_SERVER + _host: str = "gateway-xxxx-xxxx-xxxx.local:8443" - def __init__(self) -> None: - """Initialize Overkiz Config Flow.""" - super().__init__() - - self._config_entry = None - self._default_user = None - self._default_hub = DEFAULT_HUB - - async def async_validate_input(self, user_input: dict[str, Any]) -> None: + async def async_validate_input(self, user_input: dict[str, Any]) -> dict[str, Any]: """Validate user credentials.""" - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] - server = SUPPORTED_SERVERS[user_input[CONF_HUB]] - session = async_create_clientsession(self.hass) + user_input[CONF_API_TYPE] = self._api_type - client = OverkizClient( - username=username, password=password, server=server, session=session + client = self._create_cloud_client( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + server=SUPPORTED_SERVERS[user_input[CONF_HUB]], ) - await client.login(register_event_listener=False) - # Set first gateway id as unique id + # For Local API, we create and activate a local token + if self._api_type == APIType.LOCAL: + user_input[CONF_TOKEN] = await self._create_local_api_token( + cloud_client=client, + host=user_input[CONF_HOST], + verify_ssl=user_input[CONF_VERIFY_SSL], + ) + + # Set main gateway id as unique id if gateways := await client.get_gateways(): - gateway_id = gateways[0].id - await self.async_set_unique_id(gateway_id) + for gateway in gateways: + if is_overkiz_gateway(gateway.id): + gateway_id = gateway.id + await self.async_set_unique_id(gateway_id) + + return user_input async def async_step_user( 
self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Handle the initial step via config flow.""" - errors = {} + if user_input: + self._server = user_input[CONF_HUB] + + # Some Overkiz hubs do support a local API + # Users can choose between local or cloud API. + if self._server in SERVERS_WITH_LOCAL_API: + return await self.async_step_local_or_cloud() + + return await self.async_step_cloud() + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_HUB, default=self._server): vol.In( + {key: hub.name for key, hub in SUPPORTED_SERVERS.items()} + ), + } + ), + ) + + async def async_step_local_or_cloud( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Users can choose between local API or cloud API via config flow.""" + if user_input: + self._api_type = user_input[CONF_API_TYPE] + + if self._api_type == APIType.LOCAL: + return await self.async_step_local() + + return await self.async_step_cloud() + + return self.async_show_form( + step_id="local_or_cloud", + data_schema=vol.Schema( + { + vol.Required(CONF_API_TYPE): vol.In( + { + APIType.LOCAL: "Local API", + APIType.CLOUD: "Cloud API", + } + ), + } + ), + ) + + async def async_step_cloud( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the cloud authentication step via config flow.""" + errors: dict[str, str] = {} description_placeholders = {} if user_input: - self._default_user = user_input[CONF_USERNAME] - self._default_hub = user_input[CONF_HUB] + self._user = user_input[CONF_USERNAME] + + # inherit the server from previous step + user_input[CONF_HUB] = self._server try: await self.async_validate_input(user_input) @@ -81,7 +153,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): except BadCredentialsException as exception: # If authentication with CozyTouch auth server is valid, but token is invalid # for Overkiz API server, the hardware is not supported. 
- if user_input[CONF_HUB] == "atlantic_cozytouch" and not isinstance( + if user_input[CONF_HUB] == Server.ATLANTIC_COZYTOUCH and not isinstance( exception, CozyTouchBadCredentialsException ): description_placeholders["unsupported_device"] = "CozyTouch" @@ -99,26 +171,26 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): # the Overkiz API server. Login will return unknown user. description_placeholders["unsupported_device"] = "Somfy Protect" errors["base"] = "unsupported_hardware" - except Exception as exception: # pylint: disable=broad-except + except Exception: # pylint: disable=broad-except errors["base"] = "unknown" - LOGGER.exception(exception) + LOGGER.exception("Unknown error") else: - if self._config_entry: - if self._config_entry.unique_id != self.unique_id: + if self._reauth_entry: + if self._reauth_entry.unique_id != self.unique_id: return self.async_abort(reason="reauth_wrong_account") # Update existing entry during reauth self.hass.config_entries.async_update_entry( - self._config_entry, + self._reauth_entry, data={ - **self._config_entry.data, + **self._reauth_entry.data, **user_input, }, ) self.hass.async_create_task( self.hass.config_entries.async_reload( - self._config_entry.entry_id + self._reauth_entry.entry_id ) ) @@ -132,14 +204,96 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", + step_id="cloud", data_schema=vol.Schema( { - vol.Required(CONF_USERNAME, default=self._default_user): str, + vol.Required(CONF_USERNAME, default=self._user): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_HUB, default=self._default_hub): vol.In( - {key: hub.name for key, hub in SUPPORTED_SERVERS.items()} - ), + } + ), + description_placeholders=description_placeholders, + errors=errors, + ) + + async def async_step_local( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the local authentication step via config flow.""" + errors = {} + 
description_placeholders = {} + + if user_input: + self._host = user_input[CONF_HOST] + self._user = user_input[CONF_USERNAME] + + # inherit the server from previous step + user_input[CONF_HUB] = self._server + + try: + user_input = await self.async_validate_input(user_input) + except TooManyRequestsException: + errors["base"] = "too_many_requests" + except BadCredentialsException: + errors["base"] = "invalid_auth" + except ClientConnectorCertificateError as exception: + errors["base"] = "certificate_verify_failed" + LOGGER.debug(exception) + except (TimeoutError, ClientError) as exception: + errors["base"] = "cannot_connect" + LOGGER.debug(exception) + except MaintenanceException: + errors["base"] = "server_in_maintenance" + except TooManyAttemptsBannedException: + errors["base"] = "too_many_attempts" + except NotSuchTokenException: + errors["base"] = "no_such_token" + except DeveloperModeDisabled: + errors["base"] = "developer_mode_disabled" + except UnknownUserException: + # Somfy Protect accounts are not supported since they don't use + # the Overkiz API server. Login will return unknown user. 
+ description_placeholders["unsupported_device"] = "Somfy Protect" + errors["base"] = "unsupported_hardware" + except Exception: # pylint: disable=broad-except + errors["base"] = "unknown" + LOGGER.exception("Unknown error") + else: + if self._reauth_entry: + if self._reauth_entry.unique_id != self.unique_id: + return self.async_abort(reason="reauth_wrong_account") + + # Update existing entry during reauth + self.hass.config_entries.async_update_entry( + self._reauth_entry, + data={ + **self._reauth_entry.data, + **user_input, + }, + ) + + self.hass.async_create_task( + self.hass.config_entries.async_reload( + self._reauth_entry.entry_id + ) + ) + + return self.async_abort(reason="reauth_successful") + + # Create new entry + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=user_input[CONF_HOST], data=user_input + ) + + return self.async_show_form( + step_id="local", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=self._host): str, + vol.Required(CONF_USERNAME, default=self._user): str, + vol.Required(CONF_PASSWORD): str, + vol.Required(CONF_VERIFY_SSL, default=True): bool, } ), description_placeholders=description_placeholders, @@ -150,6 +304,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle DHCP discovery.""" hostname = discovery_info.hostname gateway_id = hostname[8:22] + self._host = f"gateway-{gateway_id}.local:8443" LOGGER.debug("DHCP discovery detected gateway %s", obfuscate_id(gateway_id)) return await self._process_discovery(gateway_id) @@ -160,8 +315,22 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle ZeroConf discovery.""" properties = discovery_info.properties gateway_id = properties["gateway_pin"] + hostname = discovery_info.hostname + + LOGGER.debug( + "ZeroConf discovery detected gateway %s on %s (%s)", + obfuscate_id(gateway_id), + hostname, + discovery_info.type, + ) + + if discovery_info.type == "_kizbox._tcp.local.": + self._host = 
f"gateway-{gateway_id}.local:8443" + + if discovery_info.type == "_kizboxdev._tcp.local.": + self._host = f"{discovery_info.hostname[:-1]}:{discovery_info.port}" + self._api_type = APIType.LOCAL - LOGGER.debug("ZeroConf discovery detected gateway %s", obfuscate_id(gateway_id)) return await self._process_discovery(gateway_id) async def _process_discovery(self, gateway_id: str) -> FlowResult: @@ -174,16 +343,72 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: """Handle reauth.""" - self._config_entry = cast( + self._reauth_entry = cast( ConfigEntry, self.hass.config_entries.async_get_entry(self.context["entry_id"]), ) self.context["title_placeholders"] = { - "gateway_id": self._config_entry.unique_id + "gateway_id": self._reauth_entry.unique_id } - self._default_user = self._config_entry.data[CONF_USERNAME] - self._default_hub = self._config_entry.data[CONF_HUB] + self._user = self._reauth_entry.data[CONF_USERNAME] + self._server = self._reauth_entry.data[CONF_HUB] + self._api_type = self._reauth_entry.data[CONF_API_TYPE] + + if self._reauth_entry.data[CONF_API_TYPE] == APIType.LOCAL: + self._host = self._reauth_entry.data[CONF_HOST] return await self.async_step_user(dict(entry_data)) + + def _create_cloud_client( + self, username: str, password: str, server: OverkizServer + ) -> OverkizClient: + session = async_create_clientsession(self.hass) + client = OverkizClient( + username=username, password=password, server=server, session=session + ) + + return client + + async def _create_local_api_token( + self, cloud_client: OverkizClient, host: str, verify_ssl: bool + ) -> str: + """Create local API token.""" + # Create session on Somfy cloud server to generate an access token for local API + gateways = await cloud_client.get_gateways() + + gateway_id = "" + for gateway in gateways: + # Overkiz can return multiple gateways, but we only can generate a token + # for the main 
gateway. + if is_overkiz_gateway(gateway.id): + gateway_id = gateway.id + + developer_mode = await cloud_client.get_setup_option( + f"developerMode-{gateway_id}" + ) + + if developer_mode is None: + raise DeveloperModeDisabled + + token = await cloud_client.generate_local_token(gateway_id) + await cloud_client.activate_local_token( + gateway_id=gateway_id, token=token, label="Home Assistant/local" + ) + + session = async_create_clientsession(self.hass, verify_ssl=verify_ssl) + + # Local API + local_client = OverkizClient( + username="", + password="", + token=token, + session=session, + server=generate_local_server(host=host), + verify_ssl=verify_ssl, + ) + + await local_client.login() + + return token diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index 91346b63ce0..0f30f64444b 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -5,7 +5,13 @@ from datetime import timedelta import logging from typing import Final -from pyoverkiz.enums import MeasuredValueType, OverkizCommandParam, UIClass, UIWidget +from pyoverkiz.enums import ( + MeasuredValueType, + OverkizCommandParam, + Server, + UIClass, + UIWidget, +) from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, @@ -31,8 +37,10 @@ from homeassistant.const import ( DOMAIN: Final = "overkiz" LOGGER: logging.Logger = logging.getLogger(__package__) +CONF_API_TYPE: Final = "api_type" CONF_HUB: Final = "hub" -DEFAULT_HUB: Final = "somfy_europe" +DEFAULT_SERVER: Final = Server.SOMFY_EUROPE +DEFAULT_HOST: Final = "gateway-xxxx-xxxx-xxxx.local:8443" UPDATE_INTERVAL: Final = timedelta(seconds=30) UPDATE_INTERVAL_ALL_ASSUMED_STATE: Final = timedelta(minutes=60) @@ -91,6 +99,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = { UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: 
Platform.WATER_HEATER, # widgetName, uiClass is WaterHeatingSystem (not supported) UIWidget.DOMESTIC_HOT_WATER_TANK: Platform.SWITCH, # widgetName, uiClass is WaterHeatingSystem (not supported) + UIWidget.HITACHI_AIR_TO_AIR_HEAT_PUMP: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.HITACHI_DHW: Platform.WATER_HEATER, # widgetName, uiClass is HitachiHeatingSystem (not supported) UIWidget.MY_FOX_ALARM_CONTROLLER: Platform.ALARM_CONTROL_PANEL, # widgetName, uiClass is Alarm (not supported) UIWidget.MY_FOX_SECURITY_CAMERA: Platform.SWITCH, # widgetName, uiClass is Camera (not supported) diff --git a/homeassistant/components/overkiz/coordinator.py b/homeassistant/components/overkiz/coordinator.py index e5079b3d3b8..4630af8bbf8 100644 --- a/homeassistant/components/overkiz/coordinator.py +++ b/homeassistant/components/overkiz/coordinator.py @@ -6,7 +6,7 @@ from datetime import timedelta import logging from typing import Any -from aiohttp import ServerDisconnectedError +from aiohttp import ClientConnectorError, ServerDisconnectedError from pyoverkiz.client import OverkizClient from pyoverkiz.enums import EventName, ExecutionState, Protocol from pyoverkiz.exceptions import ( @@ -79,7 +79,7 @@ class OverkizDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]): raise UpdateFailed("Server is down for maintenance.") from exception except InvalidEventListenerIdException as exception: raise UpdateFailed(exception) from exception - except TimeoutError as exception: + except (TimeoutError, ClientConnectorError) as exception: raise UpdateFailed("Failed to connect.") from exception except (ServerDisconnectedError, NotAuthenticatedException): self.executions = {} diff --git a/homeassistant/components/overkiz/cover_entities/generic_cover.py b/homeassistant/components/overkiz/cover_entities/generic_cover.py index b418bba9e41..f4a8a6a0d45 100644 --- a/homeassistant/components/overkiz/cover_entities/generic_cover.py +++ 
b/homeassistant/components/overkiz/cover_entities/generic_cover.py @@ -27,12 +27,18 @@ COMMANDS_OPEN: list[OverkizCommand] = [ OverkizCommand.OPEN, OverkizCommand.UP, ] -COMMANDS_OPEN_TILT: list[OverkizCommand] = [OverkizCommand.OPEN_SLATS] +COMMANDS_OPEN_TILT: list[OverkizCommand] = [ + OverkizCommand.OPEN_SLATS, + OverkizCommand.TILT_DOWN, +] COMMANDS_CLOSE: list[OverkizCommand] = [ OverkizCommand.CLOSE, OverkizCommand.DOWN, ] -COMMANDS_CLOSE_TILT: list[OverkizCommand] = [OverkizCommand.CLOSE_SLATS] +COMMANDS_CLOSE_TILT: list[OverkizCommand] = [ + OverkizCommand.CLOSE_SLATS, + OverkizCommand.TILT_UP, +] COMMANDS_SET_TILT_POSITION: list[OverkizCommand] = [OverkizCommand.SET_ORIENTATION] diff --git a/homeassistant/components/overkiz/diagnostics.py b/homeassistant/components/overkiz/diagnostics.py index 77ca0227579..cb8cf6eb22f 100644 --- a/homeassistant/components/overkiz/diagnostics.py +++ b/homeassistant/components/overkiz/diagnostics.py @@ -3,6 +3,7 @@ from __future__ import annotations from typing import Any +from pyoverkiz.enums import APIType from pyoverkiz.obfuscate import obfuscate_id from homeassistant.config_entries import ConfigEntry @@ -10,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry from . 
import HomeAssistantOverkizData -from .const import CONF_HUB, DOMAIN +from .const import CONF_API_TYPE, CONF_HUB, DOMAIN async def async_get_config_entry_diagnostics( @@ -23,11 +24,16 @@ async def async_get_config_entry_diagnostics( data = { "setup": await client.get_diagnostic_data(), "server": entry.data[CONF_HUB], - "execution_history": [ - repr(execution) for execution in await client.get_execution_history() - ], + "api_type": entry.data.get(CONF_API_TYPE, APIType.CLOUD), } + # Only Overkiz cloud servers expose an endpoint with execution history + if client.api_type == APIType.CLOUD: + execution_history = [ + repr(execution) for execution in await client.get_execution_history() + ] + data["execution_history"] = execution_history + return data @@ -49,11 +55,15 @@ async def async_get_device_diagnostics( }, "setup": await client.get_diagnostic_data(), "server": entry.data[CONF_HUB], - "execution_history": [ + "api_type": entry.data.get(CONF_API_TYPE, APIType.CLOUD), + } + + # Only Overkiz cloud servers expose an endpoint with execution history + if client.api_type == APIType.CLOUD: + data["execution_history"] = [ repr(execution) for execution in await client.get_execution_history() if any(command.device_url == device_url for command in execution.commands) - ], - } + ] return data diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index cc9a410392a..e5c1665b2e4 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -11,13 +11,17 @@ ], "documentation": "https://www.home-assistant.io/integrations/overkiz", "integration_type": "hub", - "iot_class": "cloud_polling", + "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.13.2"], + "requirements": ["pyoverkiz==1.13.3"], "zeroconf": [ { "type": "_kizbox._tcp.local.", "name": "gateway*" + }, + { + "type": 
"_kizboxdev._tcp.local.", + "name": "gateway*" } ] } diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 82d29a7534a..2a549f1c24d 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -3,18 +3,40 @@ "flow_title": "Gateway: {gateway_id}", "step": { "user": { - "description": "The Overkiz platform is used by various vendors like Somfy (Connexoon / TaHoma), Hitachi (Hi Kumo), Rexel (Energeasy Connect) and Atlantic (Cozytouch). Enter your application credentials and select your hub.", + "description": "Select your server. The Overkiz platform is used by various vendors like Somfy (Connexoon / TaHoma), Hitachi (Hi Kumo) and Atlantic (Cozytouch).", + "data": { + "hub": "Server" + } + }, + "local_or_cloud": { + "description": "Choose between local or cloud API. Local API supports TaHoma Connexoon, TaHoma v2, and TaHoma Switch. Climate devices are not supported in local API.", + "data": { + "api_type": "API type" + } + }, + "cloud": { + "description": "Enter your application credentials.", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "local": { + "description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network. 
\n\n After activation, enter your application credentials and change the host to include your gateway-pin or enter the IP address of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "hub": "Hub" + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "certificate_verify_failed": "Cannot connect to host, certificate verify failed.", + "developer_mode_disabled": "Developer Mode disabled. Activate the Developer Mode of your Somfy TaHoma box first.", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "no_such_token": "Cannot create a token for this gateway. Please confirm if the account is linked to this gateway.", "server_in_maintenance": "Server is down for maintenance", "too_many_attempts": "Too many attempts with an invalid token, temporarily banned", "too_many_requests": "Too many requests, try again later", diff --git a/homeassistant/components/p1_monitor/manifest.json b/homeassistant/components/p1_monitor/manifest.json index 3ed5589e577..0dfe1f3a46c 100644 --- a/homeassistant/components/p1_monitor/manifest.json +++ b/homeassistant/components/p1_monitor/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["p1monitor"], "quality_scale": "platinum", - "requirements": ["p1monitor==2.1.1"] + "requirements": ["p1monitor==3.0.0"] } diff --git a/homeassistant/components/peco/__init__.py b/homeassistant/components/peco/__init__.py index ad74200dace..bcdc4195100 100644 --- a/homeassistant/components/peco/__init__.py +++ b/homeassistant/components/peco/__init__.py @@ -5,7 +5,14 @@ from dataclasses import dataclass from datetime import timedelta from typing import Final -from peco import AlertResults, BadJSONError, HttpError, OutageResults, PecoOutageApi +from peco import ( + AlertResults, + 
BadJSONError, + HttpError, + OutageResults, + PecoOutageApi, + UnresponsiveMeterError, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform @@ -13,9 +20,16 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_COUNTY, DOMAIN, LOGGER, SCAN_INTERVAL +from .const import ( + CONF_COUNTY, + CONF_PHONE_NUMBER, + DOMAIN, + LOGGER, + OUTAGE_SCAN_INTERVAL, + SMART_METER_SCAN_INTERVAL, +) -PLATFORMS: Final = [Platform.SENSOR] +PLATFORMS: Final = [Platform.SENSOR, Platform.BINARY_SENSOR] @dataclass @@ -31,9 +45,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: websession = async_get_clientsession(hass) api = PecoOutageApi() + # Outage Counter Setup county: str = entry.data[CONF_COUNTY] - async def async_update_data() -> PECOCoordinatorData: + async def async_update_outage_data() -> OutageResults: """Fetch data from API.""" try: outages: OutageResults = ( @@ -53,15 +68,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass, LOGGER, name="PECO Outage Count", - update_method=async_update_data, - update_interval=timedelta(minutes=SCAN_INTERVAL), + update_method=async_update_outage_data, + update_interval=timedelta(minutes=OUTAGE_SCAN_INTERVAL), ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {"outage_count": coordinator} + if phone_number := entry.data.get(CONF_PHONE_NUMBER): + # Smart Meter Setup] + + async def async_update_meter_data() -> bool: + """Fetch data from API.""" + try: + data: bool = await api.meter_check(phone_number, websession) + except UnresponsiveMeterError as err: + raise 
UpdateFailed("Unresponsive meter") from err + except HttpError as err: + raise UpdateFailed(f"Error fetching data: {err}") from err + except BadJSONError as err: + raise UpdateFailed(f"Error parsing data: {err}") from err + return data + + coordinator = DataUpdateCoordinator( + hass, + LOGGER, + name="PECO Smart Meter", + update_method=async_update_meter_data, + update_interval=timedelta(minutes=SMART_METER_SCAN_INTERVAL), + ) + + await coordinator.async_config_entry_first_refresh() + + hass.data[DOMAIN][entry.entry_id]["smart_meter"] = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/peco/binary_sensor.py b/homeassistant/components/peco/binary_sensor.py new file mode 100644 index 00000000000..7f0402b207f --- /dev/null +++ b/homeassistant/components/peco/binary_sensor.py @@ -0,0 +1,59 @@ +"""Binary sensor for PECO outage counter.""" +from __future__ import annotations + +from typing import Final + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN + +PARALLEL_UPDATES: Final = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensor for PECO.""" + if "smart_meter" not in hass.data[DOMAIN][config_entry.entry_id]: + return + coordinator: DataUpdateCoordinator[bool] = hass.data[DOMAIN][config_entry.entry_id][ + "smart_meter" + ] + + async_add_entities( + [PecoBinarySensor(coordinator, phone_number=config_entry.data["phone_number"])] + ) + + +class PecoBinarySensor( + CoordinatorEntity[DataUpdateCoordinator[bool]], 
BinarySensorEntity +): + """Binary sensor for PECO outage counter.""" + + _attr_icon = "mdi:gauge" + _attr_device_class = BinarySensorDeviceClass.POWER + _attr_name = "Meter Status" + + def __init__( + self, coordinator: DataUpdateCoordinator[bool], phone_number: str + ) -> None: + """Initialize binary sensor for PECO.""" + super().__init__(coordinator) + self._attr_unique_id = f"{phone_number}" + + @property + def is_on(self) -> bool: + """Return if the meter has power.""" + return self.coordinator.data diff --git a/homeassistant/components/peco/config_flow.py b/homeassistant/components/peco/config_flow.py index 63ca7f3291a..261cdb031bf 100644 --- a/homeassistant/components/peco/config_flow.py +++ b/homeassistant/components/peco/config_flow.py @@ -1,41 +1,122 @@ """Config flow for PECO Outage Counter integration.""" from __future__ import annotations +import logging from typing import Any +from peco import ( + HttpError, + IncompatibleMeterError, + PecoOutageApi, + UnresponsiveMeterError, +) import voluptuous as vol from homeassistant import config_entries from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers import config_validation as cv -from .const import CONF_COUNTY, COUNTY_LIST, DOMAIN +from .const import CONF_COUNTY, CONF_PHONE_NUMBER, COUNTY_LIST, DOMAIN STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_COUNTY): vol.In(COUNTY_LIST), + vol.Optional(CONF_PHONE_NUMBER): cv.string, } ) +_LOGGER = logging.getLogger(__name__) + class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for PECO Outage Counter.""" VERSION = 1 + meter_verification: bool = False + meter_data: dict[str, str] = {} + meter_error: dict[str, str] = {} + + async def _verify_meter(self, phone_number: str) -> None: + """Verify if the meter is compatible.""" + + api = PecoOutageApi() + + try: + await api.meter_check(phone_number) + except ValueError: + self.meter_error = {"phone_number": "invalid_phone_number", "type": "error"} + 
except IncompatibleMeterError: + self.meter_error = {"phone_number": "incompatible_meter", "type": "abort"} + except UnresponsiveMeterError: + self.meter_error = {"phone_number": "unresponsive_meter", "type": "error"} + except HttpError: + self.meter_error = {"phone_number": "http_error", "type": "error"} + + self.hass.async_create_task( + self.hass.config_entries.flow.async_configure(flow_id=self.flow_id) + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Handle the initial step.""" + if self.meter_verification is True: + return self.async_show_progress_done(next_step_id="finish_smart_meter") + if user_input is None: return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, ) county = user_input[CONF_COUNTY] - await self.async_set_unique_id(county) + if CONF_PHONE_NUMBER not in user_input: + await self.async_set_unique_id(county) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=f"{user_input[CONF_COUNTY].capitalize()} Outage Count", + data=user_input, + ) + + phone_number = user_input[CONF_PHONE_NUMBER] + + await self.async_set_unique_id(f"{county}-{phone_number}") self._abort_if_unique_id_configured() - return self.async_create_entry( - title=f"{county.capitalize()} Outage Count", data=user_input + self.meter_verification = True + + if self.meter_error is not None: + # Clear any previous errors, since the user may have corrected them + self.meter_error = {} + + self.hass.async_create_task(self._verify_meter(phone_number)) + + self.meter_data = user_input + + return self.async_show_progress( + step_id="user", + progress_action="verifying_meter", + ) + + async def async_step_finish_smart_meter( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the finish smart meter step.""" + if "phone_number" in self.meter_error: + if self.meter_error["type"] == "error": + 
self.meter_verification = False + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors={"phone_number": self.meter_error["phone_number"]}, + ) + + return self.async_abort(reason=self.meter_error["phone_number"]) + + return self.async_create_entry( + title=f"{self.meter_data[CONF_COUNTY].capitalize()} - {self.meter_data[CONF_PHONE_NUMBER]}", + data=self.meter_data, ) diff --git a/homeassistant/components/peco/const.py b/homeassistant/components/peco/const.py index b0198ac8761..1df8ae41ecb 100644 --- a/homeassistant/components/peco/const.py +++ b/homeassistant/components/peco/const.py @@ -14,6 +14,8 @@ COUNTY_LIST: Final = [ "TOTAL", ] CONFIG_FLOW_COUNTIES: Final = [{county: county.capitalize()} for county in COUNTY_LIST] -SCAN_INTERVAL: Final = 9 +OUTAGE_SCAN_INTERVAL: Final = 9 # minutes +SMART_METER_SCAN_INTERVAL: Final = 15 # minutes CONF_COUNTY: Final = "county" ATTR_CONTENT: Final = "content" +CONF_PHONE_NUMBER: Final = "phone_number" diff --git a/homeassistant/components/peco/sensor.py b/homeassistant/components/peco/sensor.py index 5be41f7c7e1..935f2b659f9 100644 --- a/homeassistant/components/peco/sensor.py +++ b/homeassistant/components/peco/sensor.py @@ -91,7 +91,7 @@ async def async_setup_entry( ) -> None: """Set up the sensor platform.""" county: str = config_entry.data[CONF_COUNTY] - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = hass.data[DOMAIN][config_entry.entry_id]["outage_count"] async_add_entities( PecoSensor(sensor, county, coordinator) for sensor in SENSOR_LIST diff --git a/homeassistant/components/peco/strings.json b/homeassistant/components/peco/strings.json index 059b2ba71a7..cdf5bb497db 100644 --- a/homeassistant/components/peco/strings.json +++ b/homeassistant/components/peco/strings.json @@ -3,12 +3,26 @@ "step": { "user": { "data": { - "county": "County" + "county": "County", + "phone_number": "Phone Number" + }, + "data_description": { + "county": "County used for outage 
number retrieval", + "phone_number": "Phone number associated with the PECO account (optional). Adding a phone number adds a binary sensor confirming if your power is out or not, and not an issue with a breaker or an issue on your end." } } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "incompatible_meter": "Your meter is not compatible with smart meter checking." + }, + "progress": { + "verifying_meter": "One moment. Verifying that your meter is compatible. This may take a minute or two." + }, + "error": { + "invalid_phone_number": "Please enter a valid phone number.", + "unresponsive_meter": "Your meter is not responding. Please try again later.", + "http_error": "There was an error communicating with PECO. The issue that is most likely is that you entered an invalid phone number. Please check the phone number or try again later." } }, "entity": { diff --git a/homeassistant/components/permobil/__init__.py b/homeassistant/components/permobil/__init__.py new file mode 100644 index 00000000000..2f3c4c04c50 --- /dev/null +++ b/homeassistant/components/permobil/__init__.py @@ -0,0 +1,63 @@ +"""The MyPermobil integration.""" +from __future__ import annotations + +import logging + +from mypermobil import MyPermobil, MyPermobilClientException + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_CODE, + CONF_EMAIL, + CONF_REGION, + CONF_TOKEN, + CONF_TTL, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed + +from .const import APPLICATION, DOMAIN +from .coordinator import MyPermobilCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up MyPermobil from a config entry.""" + + 
# create the API object from the config and save it in hass + session = hass.helpers.aiohttp_client.async_get_clientsession() + p_api = MyPermobil( + application=APPLICATION, + session=session, + email=entry.data[CONF_EMAIL], + region=entry.data[CONF_REGION], + code=entry.data[CONF_CODE], + token=entry.data[CONF_TOKEN], + expiration_date=entry.data[CONF_TTL], + ) + try: + p_api.self_authenticate() + except MyPermobilClientException as err: + _LOGGER.error("Error authenticating %s", err) + raise ConfigEntryAuthFailed(f"Config error for {p_api.email}") from err + + # create the coordinator with the API object + coordinator = MyPermobilCoordinator(hass, p_api) + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/permobil/config_flow.py b/homeassistant/components/permobil/config_flow.py new file mode 100644 index 00000000000..644ea29d8a3 --- /dev/null +++ b/homeassistant/components/permobil/config_flow.py @@ -0,0 +1,173 @@ +"""Config flow for MyPermobil integration.""" +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from mypermobil import MyPermobil, MyPermobilAPIException, MyPermobilClientException +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_CODE, CONF_EMAIL, CONF_REGION, CONF_TOKEN, CONF_TTL +from homeassistant.core import HomeAssistant, async_get_hass +from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers import selector +from 
homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import APPLICATION, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +GET_EMAIL_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig(type=TextSelectorType.EMAIL) + ), + } +) + +GET_TOKEN_SCHEMA = vol.Schema({vol.Required(CONF_CODE): cv.string}) + + +class PermobilConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Permobil config flow.""" + + VERSION = 1 + region_names: dict[str, str] = {} + data: dict[str, str] = {} + + def __init__(self) -> None: + """Initialize flow.""" + hass: HomeAssistant = async_get_hass() + session = async_get_clientsession(hass) + self.p_api = MyPermobil(APPLICATION, session=session) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Invoke when a user initiates a flow via the user interface.""" + errors: dict[str, str] = {} + + if user_input: + try: + self.p_api.set_email(user_input[CONF_EMAIL]) + except MyPermobilClientException: + _LOGGER.exception("Error validating email") + errors["base"] = "invalid_email" + + self.data.update(user_input) + + await self.async_set_unique_id(self.data[CONF_EMAIL]) + self._abort_if_unique_id_configured() + + if errors or not user_input: + return self.async_show_form( + step_id="user", data_schema=GET_EMAIL_SCHEMA, errors=errors + ) + return await self.async_step_region() + + async def async_step_region( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Invoke when a user initiates a flow via the user interface.""" + errors: dict[str, str] = {} + if not user_input: + # fetch the list of regions names and urls from the api + # for the user to select from. 
+ try: + self.region_names = await self.p_api.request_region_names() + _LOGGER.debug( + "region names %s", + ",".join(list(self.region_names.keys())), + ) + except MyPermobilAPIException: + _LOGGER.exception("Error requesting regions") + errors["base"] = "region_fetch_error" + + else: + region_url = self.region_names[user_input[CONF_REGION]] + + self.data[CONF_REGION] = region_url + self.p_api.set_region(region_url) + _LOGGER.debug("region %s", self.p_api.region) + try: + # tell backend to send code to the users email + await self.p_api.request_application_code() + except MyPermobilAPIException: + _LOGGER.exception("Error requesting code") + errors["base"] = "code_request_error" + + if errors or not user_input: + # the error could either be that the fetch region did not pass + # or that the request application code failed + schema = vol.Schema( + { + vol.Required(CONF_REGION): selector.SelectSelector( + selector.SelectSelectorConfig( + options=list(self.region_names.keys()), + mode=selector.SelectSelectorMode.DROPDOWN, + ) + ), + } + ) + return self.async_show_form( + step_id="region", data_schema=schema, errors=errors + ) + + return await self.async_step_email_code() + + async def async_step_email_code( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Second step in config flow to enter the email code.""" + errors: dict[str, str] = {} + + if user_input: + try: + self.p_api.set_code(user_input[CONF_CODE]) + self.data.update(user_input) + token, ttl = await self.p_api.request_application_token() + self.data[CONF_TOKEN] = token + self.data[CONF_TTL] = ttl + except (MyPermobilAPIException, MyPermobilClientException): + # the code did not pass validation by the api client + # or the backend returned an error when trying to validate the code + _LOGGER.exception("Error verifying code") + errors["base"] = "invalid_code" + + if errors or not user_input: + return self.async_show_form( + step_id="email_code", data_schema=GET_TOKEN_SCHEMA, 
errors=errors + ) + + return self.async_create_entry(title=self.data[CONF_EMAIL], data=self.data) + + async def async_step_reauth(self, user_input: Mapping[str, Any]) -> FlowResult: + """Perform reauth upon an API authentication error.""" + reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + assert reauth_entry + + try: + email: str = reauth_entry.data[CONF_EMAIL] + region: str = reauth_entry.data[CONF_REGION] + self.p_api.set_email(email) + self.p_api.set_region(region) + self.data = { + CONF_EMAIL: email, + CONF_REGION: region, + } + await self.p_api.request_application_code() + except MyPermobilAPIException: + _LOGGER.exception("Error requesting code for reauth") + return self.async_abort(reason="unknown") + + return await self.async_step_email_code() diff --git a/homeassistant/components/permobil/const.py b/homeassistant/components/permobil/const.py new file mode 100644 index 00000000000..fd5fe673f2a --- /dev/null +++ b/homeassistant/components/permobil/const.py @@ -0,0 +1,11 @@ +"""Constants for the MyPermobil integration.""" + +DOMAIN = "permobil" + +APPLICATION = "Home Assistant" + + +BATTERY_ASSUMED_VOLTAGE = 25.0 # This is the average voltage over all states of charge +REGIONS = "regions" +KM = "kilometers" +MILES = "miles" diff --git a/homeassistant/components/permobil/coordinator.py b/homeassistant/components/permobil/coordinator.py new file mode 100644 index 00000000000..3695236cdf0 --- /dev/null +++ b/homeassistant/components/permobil/coordinator.py @@ -0,0 +1,57 @@ +"""DataUpdateCoordinator for permobil integration.""" + +import asyncio +from dataclasses import dataclass +from datetime import timedelta +import logging + +from mypermobil import MyPermobil, MyPermobilAPIException + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class MyPermobilData: + """MyPermobil data 
stored in the DataUpdateCoordinator.""" + + battery: dict[str, str | float | int | list | dict] + daily_usage: dict[str, str | float | int | list | dict] + records: dict[str, str | float | int | list | dict] + + +class MyPermobilCoordinator(DataUpdateCoordinator[MyPermobilData]): + """MyPermobil coordinator.""" + + def __init__(self, hass: HomeAssistant, p_api: MyPermobil) -> None: + """Initialize my coordinator.""" + super().__init__( + hass, + _LOGGER, + name="permobil", + update_interval=timedelta(minutes=5), + ) + self.p_api = p_api + + async def _async_update_data(self) -> MyPermobilData: + """Fetch data from the 3 API endpoints.""" + try: + async with asyncio.timeout(10): + battery = await self.p_api.get_battery_info() + daily_usage = await self.p_api.get_daily_usage() + records = await self.p_api.get_usage_records() + return MyPermobilData( + battery=battery, + daily_usage=daily_usage, + records=records, + ) + + except MyPermobilAPIException as err: + _LOGGER.exception( + "Error fetching data from MyPermobil API for account %s %s", + self.p_api.email, + err, + ) + raise UpdateFailed from err diff --git a/homeassistant/components/permobil/manifest.json b/homeassistant/components/permobil/manifest.json new file mode 100644 index 00000000000..fd937fc6f8a --- /dev/null +++ b/homeassistant/components/permobil/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "permobil", + "name": "MyPermobil", + "codeowners": ["@IsakNyberg"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/permobil", + "iot_class": "cloud_polling", + "requirements": ["mypermobil==0.1.6"] +} diff --git a/homeassistant/components/permobil/sensor.py b/homeassistant/components/permobil/sensor.py new file mode 100644 index 00000000000..e942aa265b8 --- /dev/null +++ b/homeassistant/components/permobil/sensor.py @@ -0,0 +1,222 @@ +"""Platform for sensor integration.""" +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import 
dataclass +import logging +from typing import Any + +from mypermobil import ( + BATTERY_AMPERE_HOURS_LEFT, + BATTERY_CHARGE_TIME_LEFT, + BATTERY_DISTANCE_LEFT, + BATTERY_INDOOR_DRIVE_TIME, + BATTERY_MAX_AMPERE_HOURS, + BATTERY_MAX_DISTANCE_LEFT, + BATTERY_STATE_OF_CHARGE, + BATTERY_STATE_OF_HEALTH, + RECORDS_SEATING, + USAGE_ADJUSTMENTS, + USAGE_DISTANCE, +) + +from homeassistant import config_entries +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import PERCENTAGE, UnitOfEnergy, UnitOfLength, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import BATTERY_ASSUMED_VOLTAGE, DOMAIN +from .coordinator import MyPermobilCoordinator + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class PermobilRequiredKeysMixin: + """Mixin for required keys.""" + + value_fn: Callable[[Any], float | int] + available_fn: Callable[[Any], bool] + + +@dataclass +class PermobilSensorEntityDescription( + SensorEntityDescription, PermobilRequiredKeysMixin +): + """Describes Permobil sensor entity.""" + + +SENSOR_DESCRIPTIONS: tuple[PermobilSensorEntityDescription, ...] 
= ( + PermobilSensorEntityDescription( + # Current battery as a percentage + value_fn=lambda data: data.battery[BATTERY_STATE_OF_CHARGE[0]], + available_fn=lambda data: BATTERY_STATE_OF_CHARGE[0] in data.battery, + key="state_of_charge", + translation_key="state_of_charge", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + ), + PermobilSensorEntityDescription( + # Current battery health as a percentage of original capacity + value_fn=lambda data: data.battery[BATTERY_STATE_OF_HEALTH[0]], + available_fn=lambda data: BATTERY_STATE_OF_HEALTH[0] in data.battery, + key="state_of_health", + translation_key="state_of_health", + icon="mdi:battery-heart-variant", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + PermobilSensorEntityDescription( + # Time until fully charged (displays 0 if not charging) + value_fn=lambda data: data.battery[BATTERY_CHARGE_TIME_LEFT[0]], + available_fn=lambda data: BATTERY_CHARGE_TIME_LEFT[0] in data.battery, + key="charge_time_left", + translation_key="charge_time_left", + icon="mdi:battery-clock", + native_unit_of_measurement=UnitOfTime.HOURS, + device_class=SensorDeviceClass.DURATION, + ), + PermobilSensorEntityDescription( + # Distance possible on current change (km) + value_fn=lambda data: data.battery[BATTERY_DISTANCE_LEFT[0]], + available_fn=lambda data: BATTERY_DISTANCE_LEFT[0] in data.battery, + key="distance_left", + translation_key="distance_left", + icon="mdi:map-marker-distance", + native_unit_of_measurement=UnitOfLength.KILOMETERS, + device_class=SensorDeviceClass.DISTANCE, + ), + PermobilSensorEntityDescription( + # Drive time possible on current charge + value_fn=lambda data: data.battery[BATTERY_INDOOR_DRIVE_TIME[0]], + available_fn=lambda data: BATTERY_INDOOR_DRIVE_TIME[0] in data.battery, + key="indoor_drive_time", + translation_key="indoor_drive_time", + native_unit_of_measurement=UnitOfTime.HOURS, + 
device_class=SensorDeviceClass.DURATION, + ), + PermobilSensorEntityDescription( + # Watt hours the battery can store given battery health + value_fn=lambda data: data.battery[BATTERY_MAX_AMPERE_HOURS[0]] + * BATTERY_ASSUMED_VOLTAGE, + available_fn=lambda data: BATTERY_MAX_AMPERE_HOURS[0] in data.battery, + key="max_watt_hours", + translation_key="max_watt_hours", + icon="mdi:lightning-bolt", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + PermobilSensorEntityDescription( + # Current amount of watt hours in battery + value_fn=lambda data: data.battery[BATTERY_AMPERE_HOURS_LEFT[0]] + * BATTERY_ASSUMED_VOLTAGE, + available_fn=lambda data: BATTERY_AMPERE_HOURS_LEFT[0] in data.battery, + key="watt_hours_left", + translation_key="watt_hours_left", + icon="mdi:lightning-bolt", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + PermobilSensorEntityDescription( + # Distance that can be traveled with full charge given battery health (km) + value_fn=lambda data: data.battery[BATTERY_MAX_DISTANCE_LEFT[0]], + available_fn=lambda data: BATTERY_MAX_DISTANCE_LEFT[0] in data.battery, + key="max_distance_left", + translation_key="max_distance_left", + icon="mdi:map-marker-distance", + native_unit_of_measurement=UnitOfLength.KILOMETERS, + device_class=SensorDeviceClass.DISTANCE, + ), + PermobilSensorEntityDescription( + # Distance traveled today monotonically increasing, resets every 24h (km) + value_fn=lambda data: data.daily_usage[USAGE_DISTANCE[0]], + available_fn=lambda data: USAGE_DISTANCE[0] in data.daily_usage, + key="usage_distance", + translation_key="usage_distance", + icon="mdi:map-marker-distance", + native_unit_of_measurement=UnitOfLength.KILOMETERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + 
PermobilSensorEntityDescription( + # Number of adjustments monotonically increasing, resets every 24h + value_fn=lambda data: data.daily_usage[USAGE_ADJUSTMENTS[0]], + available_fn=lambda data: USAGE_ADJUSTMENTS[0] in data.daily_usage, + key="usage_adjustments", + translation_key="usage_adjustments", + icon="mdi:seat-recline-extra", + native_unit_of_measurement="adjustments", + state_class=SensorStateClass.TOTAL_INCREASING, + ), + PermobilSensorEntityDescription( + # Largest number of adjustments in a single 24h period, never resets + value_fn=lambda data: data.records[RECORDS_SEATING[0]], + available_fn=lambda data: RECORDS_SEATING[0] in data.records, + key="record_adjustments", + translation_key="record_adjustments", + icon="mdi:seat-recline-extra", + native_unit_of_measurement="adjustments", + state_class=SensorStateClass.TOTAL_INCREASING, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: config_entries.ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Create sensors from a config entry created in the integrations UI.""" + + coordinator: MyPermobilCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + PermobilSensor(coordinator=coordinator, description=description) + for description in SENSOR_DESCRIPTIONS + ) + + +class PermobilSensor(CoordinatorEntity[MyPermobilCoordinator], SensorEntity): + """Representation of a Sensor. + + This implements the common functions of all sensors.
+ """ + + _attr_has_entity_name = True + _attr_suggested_display_precision = 0 + entity_description: PermobilSensorEntityDescription + _available = True + + def __init__( + self, + coordinator: MyPermobilCoordinator, + description: PermobilSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator=coordinator) + self.entity_description = description + self._attr_unique_id = ( + f"{coordinator.p_api.email}_{self.entity_description.key}" + ) + + @property + def available(self) -> bool: + """Return True if the sensor has value.""" + return super().available and self.entity_description.available_fn( + self.coordinator.data + ) + + @property + def native_value(self) -> float | int: + """Return the value of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/permobil/strings.json b/homeassistant/components/permobil/strings.json new file mode 100644 index 00000000000..b0b630eff08 --- /dev/null +++ b/homeassistant/components/permobil/strings.json @@ -0,0 +1,70 @@ +{ + "config": { + "step": { + "user": { + "data": { + "email": "Enter your permobil email" + } + }, + "email_code": { + "description": "Enter the code that was sent to your email.", + "data": { + "code": "Email code" + } + }, + "region": { + "description": "Select the region of your account.", + "data": { + "code": "Region" + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "error": { + "unknown": "Unexpected error, more information in the logs", + "region_fetch_error": "Error fetching regions", + "code_request_error": "Error requesting application code", + "invalid_email": "Invalid email", + "invalid_code": "The code you gave is incorrect" + } + }, + "entity": { + "sensor": { + "state_of_charge": { + "name": "Battery charge" + }, + "state_of_health": { + "name": "Battery health" + }, + "charge_time_left": { + "name": "Charge time left" + 
}, + "distance_left": { + "name": "Distance left" + }, + "indoor_drive_time": { + "name": "Indoor drive time" + }, + "max_watt_hours": { + "name": "Battery max watt hours" + }, + "watt_hours_left": { + "name": "Watt hours left" + }, + "max_distance_left": { + "name": "Full charge distance" + }, + "usage_distance": { + "name": "Distance traveled" + }, + "usage_adjustments": { + "name": "Number of adjustments" + }, + "record_adjustments": { + "name": "Record number of adjustments" + } + } + } +} diff --git a/homeassistant/components/person/__init__.py b/homeassistant/components/person/__init__.py index 49b719a5490..b6f8b5b2db6 100644 --- a/homeassistant/components/person/__init__.py +++ b/homeassistant/components/person/__init__.py @@ -1,9 +1,12 @@ """Support for tracking people.""" from __future__ import annotations +from http import HTTPStatus +from ipaddress import ip_address import logging from typing import Any +from aiohttp import web import voluptuous as vol from homeassistant.auth import EVENT_USER_REMOVED @@ -13,6 +16,7 @@ from homeassistant.components.device_tracker import ( DOMAIN as DEVICE_TRACKER_DOMAIN, SourceType, ) +from homeassistant.components.http.view import HomeAssistantView from homeassistant.const import ( ATTR_EDITABLE, ATTR_ENTITY_ID, @@ -47,10 +51,12 @@ from homeassistant.helpers import ( ) from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.helpers.network import is_cloud_connection from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass +from homeassistant.util.network import is_local _LOGGER = logging.getLogger(__name__) @@ -385,6 +391,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass, DOMAIN, SERVICE_RELOAD, async_reload_yaml ) + 
hass.http.register_view(ListPersonsView) + return True @@ -569,3 +577,44 @@ def _get_latest(prev: State | None, curr: State): if prev is None or curr.last_updated > prev.last_updated: return curr return prev + + +class ListPersonsView(HomeAssistantView): + """List all persons if request is made from a local network.""" + + requires_auth = False + url = "/api/person/list" + name = "api:person:list" + + async def get(self, request: web.Request) -> web.Response: + """Return a list of persons if request comes from a local IP.""" + try: + remote_address = ip_address(request.remote) # type: ignore[arg-type] + except ValueError: + return self.json_message( + message="Invalid remote IP", + status_code=HTTPStatus.BAD_REQUEST, + message_code="invalid_remote_ip", + ) + + hass: HomeAssistant = request.app["hass"] + if is_cloud_connection(hass) or not is_local(remote_address): + return self.json_message( + message="Not local", + status_code=HTTPStatus.BAD_REQUEST, + message_code="not_local", + ) + + yaml, storage, _ = hass.data[DOMAIN] + persons = [*yaml.async_items(), *storage.async_items()] + + return self.json( + { + person[ATTR_USER_ID]: { + ATTR_NAME: person[ATTR_NAME], + CONF_PICTURE: person.get(CONF_PICTURE), + } + for person in persons + if person.get(ATTR_USER_ID) + } + ) diff --git a/homeassistant/components/person/manifest.json b/homeassistant/components/person/manifest.json index f6682058dae..7f370be6fbe 100644 --- a/homeassistant/components/person/manifest.json +++ b/homeassistant/components/person/manifest.json @@ -3,7 +3,7 @@ "name": "Person", "after_dependencies": ["device_tracker"], "codeowners": [], - "dependencies": ["image_upload"], + "dependencies": ["image_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/person", "integration_type": "system", "iot_class": "calculated", diff --git a/homeassistant/components/philips_js/__init__.py b/homeassistant/components/philips_js/__init__.py index 969c6c7b837..b81fec90a59 100644 --- 
a/homeassistant/components/philips_js/__init__.py +++ b/homeassistant/components/philips_js/__init__.py @@ -36,6 +36,7 @@ PLATFORMS = [ Platform.LIGHT, Platform.REMOTE, Platform.SWITCH, + Platform.BINARY_SENSOR, ] LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/philips_js/binary_sensor.py b/homeassistant/components/philips_js/binary_sensor.py new file mode 100644 index 00000000000..1e6c1241aea --- /dev/null +++ b/homeassistant/components/philips_js/binary_sensor.py @@ -0,0 +1,107 @@ +"""Philips TV binary sensors.""" +from __future__ import annotations + +from dataclasses import dataclass + +from haphilipsjs import PhilipsTV + +from homeassistant.components.binary_sensor import ( + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import PhilipsTVDataUpdateCoordinator +from .const import DOMAIN +from .entity import PhilipsJsEntity + + +@dataclass +class PhilipsTVBinarySensorEntityDescription(BinarySensorEntityDescription): + """An entity description for Philips TV binary sensor.""" + + def __init__(self, recording_value, *args, **kwargs) -> None: + """Set up a binary sensor entity description and add additional attributes.""" + super().__init__(*args, **kwargs) + self.recording_value: str = recording_value + + +DESCRIPTIONS = ( + PhilipsTVBinarySensorEntityDescription( + key="recording_ongoing", + translation_key="recording_ongoing", + icon="mdi:record-rec", + recording_value="RECORDING_ONGOING", + ), + PhilipsTVBinarySensorEntityDescription( + key="recording_new", + translation_key="recording_new", + icon="mdi:new-box", + recording_value="RECORDING_NEW", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the configuration entry.""" +
coordinator: PhilipsTVDataUpdateCoordinator = hass.data[DOMAIN][ + config_entry.entry_id + ] + + if ( + coordinator.api.json_feature_supported("recordings", "List") + and coordinator.api.api_version == 6 + ): + async_add_entities( + PhilipsTVBinarySensorEntityRecordingType(coordinator, description) + for description in DESCRIPTIONS + ) + + +def _check_for_recording_entry(api: PhilipsTV, entry: str, value: str) -> bool: + """Return True if at least one specified value is available within entry of list.""" + if api.recordings_list is None: + return False + for rec in api.recordings_list["recordings"]: + if rec.get(entry) == value: + return True + return False + + +class PhilipsTVBinarySensorEntityRecordingType(PhilipsJsEntity, BinarySensorEntity): + """A Philips TV binary sensor class, which allows multiple entities given by a BinarySensorEntityDescription.""" + + entity_description: PhilipsTVBinarySensorEntityDescription + + def __init__( + self, + coordinator: PhilipsTVDataUpdateCoordinator, + description: PhilipsTVBinarySensorEntityDescription, + ) -> None: + """Initialize entity class.""" + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self._attr_device_info = coordinator.device_info + self._attr_is_on = _check_for_recording_entry( + coordinator.api, + "RecordingType", + description.recording_value, + ) + + super().__init__(coordinator) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator and set is_on true if one specified value is available within given entry of list.""" + self._attr_is_on = _check_for_recording_entry( + self.coordinator.api, + "RecordingType", + self.entity_description.recording_value, + ) + super()._handle_coordinator_update() diff --git a/homeassistant/components/philips_js/strings.json b/homeassistant/components/philips_js/strings.json index 6c738a36df3..3ea632ce436 100644 --- 
a/homeassistant/components/philips_js/strings.json +++ b/homeassistant/components/philips_js/strings.json @@ -44,6 +44,14 @@ } }, "entity": { + "binary_sensor": { + "recording_new": { + "name": "New recording available" + }, + "recording_ongoing": { + "name": "Recording ongoing" + } + }, "light": { "ambilight": { "name": "Ambilight" diff --git a/homeassistant/components/picnic/__init__.py b/homeassistant/components/picnic/__init__.py index ec7f6e15425..6826d8940ab 100644 --- a/homeassistant/components/picnic/__init__.py +++ b/homeassistant/components/picnic/__init__.py @@ -10,7 +10,7 @@ from .const import CONF_API, CONF_COORDINATOR, CONF_COUNTRY_CODE, DOMAIN from .coordinator import PicnicUpdateCoordinator from .services import async_register_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.SENSOR, Platform.TODO] def create_picnic_client(entry: ConfigEntry): diff --git a/homeassistant/components/picnic/sensor.py b/homeassistant/components/picnic/sensor.py index e7a69e0bf02..507ab82e8e2 100644 --- a/homeassistant/components/picnic/sensor.py +++ b/homeassistant/components/picnic/sensor.py @@ -17,10 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util from .const import ( @@ -44,6 +41,7 @@ from .const import ( SENSOR_SELECTED_SLOT_MIN_ORDER_VALUE, SENSOR_SELECTED_SLOT_START, ) +from .coordinator import PicnicUpdateCoordinator @dataclass @@ -237,7 +235,7 @@ async def async_setup_entry( ) -class PicnicSensor(SensorEntity, CoordinatorEntity): +class PicnicSensor(SensorEntity, CoordinatorEntity[PicnicUpdateCoordinator]): """The 
CoordinatorEntity subclass representing Picnic sensors.""" _attr_has_entity_name = True @@ -246,7 +244,7 @@ class PicnicSensor(SensorEntity, CoordinatorEntity): def __init__( self, - coordinator: DataUpdateCoordinator[Any], + coordinator: PicnicUpdateCoordinator, config_entry: ConfigEntry, description: PicnicSensorEntityDescription, ) -> None: diff --git a/homeassistant/components/picnic/services.py b/homeassistant/components/picnic/services.py index 3af2a521f8a..b44d4dd5a62 100644 --- a/homeassistant/components/picnic/services.py +++ b/homeassistant/components/picnic/services.py @@ -66,7 +66,7 @@ async def handle_add_product( product_id = call.data.get("product_id") if not product_id: product_id = await hass.async_add_executor_job( - _product_search, api_client, cast(str, call.data["product_name"]) + product_search, api_client, cast(str, call.data["product_name"]) ) if not product_id: @@ -77,8 +77,11 @@ async def handle_add_product( ) -def _product_search(api_client: PicnicAPI, product_name: str) -> None | str: +def product_search(api_client: PicnicAPI, product_name: str | None) -> None | str: """Query the api client for the product name.""" + if product_name is None: + return None + search_result = api_client.search(product_name) if not search_result or "items" not in search_result[0]: diff --git a/homeassistant/components/picnic/strings.json b/homeassistant/components/picnic/strings.json index 0fd107609d1..9a6b7162fd5 100644 --- a/homeassistant/components/picnic/strings.json +++ b/homeassistant/components/picnic/strings.json @@ -21,6 +21,11 @@ } }, "entity": { + "todo": { + "shopping_cart": { + "name": "Shopping cart" + } + }, "sensor": { "cart_items_count": { "name": "Cart items count" diff --git a/homeassistant/components/picnic/todo.py b/homeassistant/components/picnic/todo.py new file mode 100644 index 00000000000..fea99f7403d --- /dev/null +++ b/homeassistant/components/picnic/todo.py @@ -0,0 +1,95 @@ +"""Definition of Picnic shopping cart.""" +from 
__future__ import annotations + +import logging +from typing import cast + +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import CONF_COORDINATOR, DOMAIN +from .coordinator import PicnicUpdateCoordinator +from .services import product_search + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Picnic shopping cart todo platform config entry.""" + picnic_coordinator = hass.data[DOMAIN][config_entry.entry_id][CONF_COORDINATOR] + + async_add_entities([PicnicCart(picnic_coordinator, config_entry)]) + + +class PicnicCart(TodoListEntity, CoordinatorEntity[PicnicUpdateCoordinator]): + """A Picnic Shopping Cart TodoListEntity.""" + + _attr_has_entity_name = True + _attr_icon = "mdi:cart" + _attr_supported_features = TodoListEntityFeature.CREATE_TODO_ITEM + _attr_translation_key = "shopping_cart" + + def __init__( + self, + coordinator: PicnicUpdateCoordinator, + config_entry: ConfigEntry, + ) -> None: + """Initialize PicnicCart.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, cast(str, config_entry.unique_id))}, + manufacturer="Picnic", + model=config_entry.unique_id, + ) + self._attr_unique_id = f"{config_entry.unique_id}-cart" + + @property + def todo_items(self) -> list[TodoItem] | None: + """Get the current set of items in cart items.""" + if self.coordinator.data is None: + 
return None + + _LOGGER.debug(self.coordinator.data["cart_data"]["items"]) + + items = [] + for item in self.coordinator.data["cart_data"]["items"]: + for article in item["items"]: + items.append( + TodoItem( + summary=f"{article['name']} ({article['unit_quantity']})", + uid=f"{item['id']}-{article['id']}", + status=TodoItemStatus.NEEDS_ACTION, # We set 'NEEDS_ACTION' so they count as state + ) + ) + + return items + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add item to shopping cart.""" + product_id = await self.hass.async_add_executor_job( + product_search, self.coordinator.picnic_api_client, item.summary + ) + + if not product_id: + raise ServiceValidationError("No product found or no product ID given") + + await self.hass.async_add_executor_job( + self.coordinator.picnic_api_client.add_product, product_id, 1 + ) + + await self.coordinator.async_refresh() diff --git a/homeassistant/components/ping/__init__.py b/homeassistant/components/ping/__init__.py index 26dd8113231..81df1401f91 100644 --- a/homeassistant/components/ping/__init__.py +++ b/homeassistant/components/ping/__init__.py @@ -4,18 +4,22 @@ from __future__ import annotations from dataclasses import dataclass import logging -from icmplib import SocketPermissionError, ping as icmp_ping +from icmplib import SocketPermissionError, async_ping +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, PLATFORMS +from .const import CONF_PING_COUNT, DOMAIN +from .coordinator import PingUpdateCoordinator +from .helpers import PingDataICMPLib, PingDataSubProcess _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.platform_only_config_schema(DOMAIN) +PLATFORMS = 
[Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER] @dataclass(slots=True) @@ -23,26 +27,68 @@ class PingDomainData: """Dataclass to store privileged status.""" privileged: bool | None + coordinators: dict[str, PingUpdateCoordinator] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the ping integration.""" - await async_setup_reload_service(hass, DOMAIN, PLATFORMS) hass.data[DOMAIN] = PingDomainData( - privileged=await hass.async_add_executor_job(_can_use_icmp_lib_with_privilege), + privileged=await _can_use_icmp_lib_with_privilege(), + coordinators={}, ) return True -def _can_use_icmp_lib_with_privilege() -> None | bool: +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Ping (ICMP) from a config entry.""" + + data: PingDomainData = hass.data[DOMAIN] + + host: str = entry.options[CONF_HOST] + count: int = int(entry.options[CONF_PING_COUNT]) + ping_cls: type[PingDataICMPLib | PingDataSubProcess] + if data.privileged is None: + ping_cls = PingDataSubProcess + else: + ping_cls = PingDataICMPLib + + coordinator = PingUpdateCoordinator( + hass=hass, ping=ping_cls(hass, host, count, data.privileged) + ) + await coordinator.async_config_entry_first_refresh() + + data.coordinators[entry.entry_id] = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) + + return True + + +async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle an options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + if unload_ok: + # drop coordinator for config entry + hass.data[DOMAIN].coordinators.pop(entry.entry_id) + + return unload_ok + + +async def 
_can_use_icmp_lib_with_privilege() -> None | bool: """Verify we can create a raw socket.""" try: - icmp_ping("127.0.0.1", count=0, timeout=0, privileged=True) + await async_ping("127.0.0.1", count=0, timeout=0, privileged=True) except SocketPermissionError: try: - icmp_ping("127.0.0.1", count=0, timeout=0, privileged=False) + await async_ping("127.0.0.1", count=0, timeout=0, privileged=False) except SocketPermissionError: _LOGGER.debug( "Cannot use icmplib because privileges are insufficient to create the" diff --git a/homeassistant/components/ping/binary_sensor.py b/homeassistant/components/ping/binary_sensor.py index b120c453195..97636111586 100644 --- a/homeassistant/components/ping/binary_sensor.py +++ b/homeassistant/components/ping/binary_sensor.py @@ -1,7 +1,6 @@ """Tracks the latency of a host by sending ICMP echo requests (ping).""" from __future__ import annotations -from datetime import timedelta import logging from typing import Any @@ -12,34 +11,26 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.const import CONF_HOST, CONF_NAME, STATE_ON -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import PingDomainData -from .const import DOMAIN -from .helpers import PingDataICMPLib, PingDataSubProcess +from .const import CONF_IMPORTED_BY, CONF_PING_COUNT, DEFAULT_PING_COUNT, DOMAIN +from .coordinator import PingUpdateCoordinator _LOGGER = logging.getLogger(__name__) - ATTR_ROUND_TRIP_TIME_AVG = "round_trip_time_avg" ATTR_ROUND_TRIP_TIME_MAX = "round_trip_time_max" ATTR_ROUND_TRIP_TIME_MDEV = "round_trip_time_mdev" ATTR_ROUND_TRIP_TIME_MIN = "round_trip_time_min" -CONF_PING_COUNT = "count" - -DEFAULT_NAME = "Ping" -DEFAULT_PING_COUNT = 5 - -SCAN_INTERVAL = timedelta(minutes=5) - -PARALLEL_UPDATES = 50 - PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, @@ -57,75 +48,76 @@ async def async_setup_platform( async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the Ping Binary sensor.""" + """YAML init: import via config flow.""" - data: PingDomainData = hass.data[DOMAIN] + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_IMPORTED_BY: "binary_sensor", **config}, + ) + ) - host: str = config[CONF_HOST] - count: int = config[CONF_PING_COUNT] - name: str = config.get(CONF_NAME, f"{DEFAULT_NAME} {host}") - privileged: bool | None = data.privileged - ping_cls: type[PingDataSubProcess | PingDataICMPLib] - if privileged is None: - ping_cls = PingDataSubProcess - else: - ping_cls = PingDataICMPLib - - async_add_entities( - [PingBinarySensor(name, ping_cls(hass, host, count, privileged))] + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Ping", + }, ) -class PingBinarySensor(RestoreEntity, BinarySensorEntity): +async def async_setup_entry( + hass: 
HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up a Ping config entry.""" + + data: PingDomainData = hass.data[DOMAIN] + + async_add_entities([PingBinarySensor(entry, data.coordinators[entry.entry_id])]) + + +class PingBinarySensor(CoordinatorEntity[PingUpdateCoordinator], BinarySensorEntity): """Representation of a Ping Binary sensor.""" _attr_device_class = BinarySensorDeviceClass.CONNECTIVITY + _attr_available = False - def __init__(self, name: str, ping: PingDataSubProcess | PingDataICMPLib) -> None: + def __init__( + self, config_entry: ConfigEntry, coordinator: PingUpdateCoordinator + ) -> None: """Initialize the Ping Binary sensor.""" - self._attr_available = False - self._attr_name = name - self._ping = ping + super().__init__(coordinator) + + self._attr_name = config_entry.title + self._attr_unique_id = config_entry.entry_id + + # if this was imported just enable it when it was enabled before + if CONF_IMPORTED_BY in config_entry.data: + self._attr_entity_registry_enabled_default = bool( + config_entry.data[CONF_IMPORTED_BY] == "binary_sensor" + ) @property def is_on(self) -> bool: """Return true if the binary sensor is on.""" - return self._ping.is_alive + return self.coordinator.data.is_alive @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the ICMP checo request.""" - if self._ping.data is None: + if self.coordinator.data.data is None: return None return { - ATTR_ROUND_TRIP_TIME_AVG: self._ping.data["avg"], - ATTR_ROUND_TRIP_TIME_MAX: self._ping.data["max"], - ATTR_ROUND_TRIP_TIME_MDEV: self._ping.data["mdev"], - ATTR_ROUND_TRIP_TIME_MIN: self._ping.data["min"], - } - - async def async_update(self) -> None: - """Get the latest data.""" - await self._ping.async_update() - self._attr_available = True - - async def async_added_to_hass(self) -> None: - """Restore previous state on restart to avoid blocking startup.""" - await super().async_added_to_hass() 
- - last_state = await self.async_get_last_state() - if last_state is not None: - self._attr_available = True - - if last_state is None or last_state.state != STATE_ON: - self._ping.data = None - return - - attributes = last_state.attributes - self._ping.is_alive = True - self._ping.data = { - "min": attributes[ATTR_ROUND_TRIP_TIME_MIN], - "max": attributes[ATTR_ROUND_TRIP_TIME_MAX], - "avg": attributes[ATTR_ROUND_TRIP_TIME_AVG], - "mdev": attributes[ATTR_ROUND_TRIP_TIME_MDEV], + ATTR_ROUND_TRIP_TIME_AVG: self.coordinator.data.data["avg"], + ATTR_ROUND_TRIP_TIME_MAX: self.coordinator.data.data["max"], + ATTR_ROUND_TRIP_TIME_MDEV: self.coordinator.data.data["mdev"], + ATTR_ROUND_TRIP_TIME_MIN: self.coordinator.data.data["min"], } diff --git a/homeassistant/components/ping/config_flow.py b/homeassistant/components/ping/config_flow.py new file mode 100644 index 00000000000..42cdd3f3a77 --- /dev/null +++ b/homeassistant/components/ping/config_flow.py @@ -0,0 +1,107 @@ +"""Config flow for Ping (ICMP) integration.""" +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.core import callback +from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers import selector +from homeassistant.util.network import is_ip_address + +from .const import CONF_IMPORTED_BY, CONF_PING_COUNT, DEFAULT_PING_COUNT, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Ping.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle the initial step.""" + if user_input is None: + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST): str, + } + ), + ) + + if not 
is_ip_address(user_input[CONF_HOST]): + self.async_abort(reason="invalid_ip_address") + + self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + return self.async_create_entry( + title=user_input[CONF_HOST], + data={}, + options={**user_input, CONF_PING_COUNT: DEFAULT_PING_COUNT}, + ) + + async def async_step_import(self, import_info: Mapping[str, Any]) -> FlowResult: + """Import an entry.""" + + to_import = { + CONF_HOST: import_info[CONF_HOST], + CONF_PING_COUNT: import_info[CONF_PING_COUNT], + } + title = import_info.get(CONF_NAME, import_info[CONF_HOST]) + + self._async_abort_entries_match({CONF_HOST: to_import[CONF_HOST]}) + return self.async_create_entry( + title=title, + data={CONF_IMPORTED_BY: import_info[CONF_IMPORTED_BY]}, + options=to_import, + ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: config_entries.ConfigEntry, + ) -> config_entries.OptionsFlow: + """Create the options flow.""" + return OptionsFlowHandler(config_entry) + + +class OptionsFlowHandler(config_entries.OptionsFlow): + """Handle an options flow for Ping.""" + + def __init__(self, config_entry: config_entries.ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(title="", data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Required( + CONF_HOST, default=self.config_entry.options[CONF_HOST] + ): str, + vol.Optional( + CONF_PING_COUNT, + default=self.config_entry.options[CONF_PING_COUNT], + ): selector.NumberSelector( + selector.NumberSelectorConfig( + min=1, max=100, mode=selector.NumberSelectorMode.BOX + ) + ), + } + ), + ) diff --git a/homeassistant/components/ping/const.py b/homeassistant/components/ping/const.py index fd70a9340c2..6ee53ea3d22 100644 --- 
a/homeassistant/components/ping/const.py +++ b/homeassistant/components/ping/const.py @@ -1,6 +1,5 @@ """Tracks devices by sending a ICMP echo request (ping).""" -from homeassistant.const import Platform # The ping binary and icmplib timeouts are not the same # timeout. ping is an overall timeout, icmplib is the @@ -15,4 +14,7 @@ ICMP_TIMEOUT = 1 PING_ATTEMPTS_COUNT = 3 DOMAIN = "ping" -PLATFORMS = [Platform.BINARY_SENSOR] + +CONF_PING_COUNT = "count" +CONF_IMPORTED_BY = "imported_by" +DEFAULT_PING_COUNT = 5 diff --git a/homeassistant/components/ping/coordinator.py b/homeassistant/components/ping/coordinator.py new file mode 100644 index 00000000000..dadd105b606 --- /dev/null +++ b/homeassistant/components/ping/coordinator.py @@ -0,0 +1,53 @@ +"""DataUpdateCoordinator for the ping integration.""" +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging +from typing import Any + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .helpers import PingDataICMPLib, PingDataSubProcess + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(slots=True, frozen=True) +class PingResult: + """Dataclass returned by the coordinator.""" + + ip_address: str + is_alive: bool + data: dict[str, Any] | None + + +class PingUpdateCoordinator(DataUpdateCoordinator[PingResult]): + """The Ping update coordinator.""" + + ping: PingDataSubProcess | PingDataICMPLib + + def __init__( + self, + hass: HomeAssistant, + ping: PingDataSubProcess | PingDataICMPLib, + ) -> None: + """Initialize the Ping coordinator.""" + self.ping = ping + + super().__init__( + hass, + _LOGGER, + name=f"Ping {ping.ip_address}", + update_interval=timedelta(minutes=5), + ) + + async def _async_update_data(self) -> PingResult: + """Trigger ping check.""" + await self.ping.async_update() + return PingResult( + ip_address=self.ping.ip_address, + is_alive=self.ping.is_alive, + 
data=self.ping.data, + ) diff --git a/homeassistant/components/ping/device_tracker.py b/homeassistant/components/ping/device_tracker.py index 9a63a2f844d..ceff1b2e124 100644 --- a/homeassistant/components/ping/device_tracker.py +++ b/homeassistant/components/ping/device_tracker.py @@ -1,39 +1,31 @@ """Tracks devices by sending a ICMP echo request (ping).""" from __future__ import annotations -import asyncio -from datetime import datetime, timedelta import logging -import subprocess -from icmplib import async_multiping import voluptuous as vol from homeassistant.components.device_tracker import ( - CONF_SCAN_INTERVAL, PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA, - SCAN_INTERVAL, AsyncSeeCallback, + ScannerEntity, SourceType, ) -from homeassistant.const import CONF_HOSTS -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import dt as dt_util -from homeassistant.util.async_ import gather_with_limited_concurrency -from homeassistant.util.process import kill_subprocess +from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import PingDomainData -from .const import DOMAIN, ICMP_TIMEOUT, PING_ATTEMPTS_COUNT, PING_TIMEOUT +from .const import CONF_IMPORTED_BY, CONF_PING_COUNT, DOMAIN +from .coordinator import PingUpdateCoordinator _LOGGER = logging.getLogger(__name__) -PARALLEL_UPDATES = 0 -CONF_PING_COUNT = "count" -CONCURRENT_PING_LIMIT = 6 - PLATFORM_SCHEMA = BASE_PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOSTS): {cv.slug: cv.string}, @@ -42,123 +34,91 @@ PLATFORM_SCHEMA = BASE_PLATFORM_SCHEMA.extend( ) -class HostSubProcess: - """Host object with ping detection.""" - - def __init__( - self, - ip_address: str, - dev_id: str, - hass: HomeAssistant, - config: ConfigType, - privileged: bool | None, - ) -> None: - """Initialize the Host pinger.""" - self.hass = hass - self.ip_address = ip_address - self.dev_id = dev_id - self._count = config[CONF_PING_COUNT] - self._ping_cmd = ["ping", "-n", "-q", "-c1", "-W1", ip_address] - - def ping(self) -> bool | None: - """Send an ICMP echo request and return True if success.""" - with subprocess.Popen( - self._ping_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - close_fds=False, # required for posix_spawn - ) as pinger: - try: - pinger.communicate(timeout=1 + PING_TIMEOUT) - return pinger.returncode == 0 - except subprocess.TimeoutExpired: - kill_subprocess(pinger) - return False - - except subprocess.CalledProcessError: - return False - - def update(self) -> bool: - """Update device state by sending one or more ping messages.""" - failed = 0 - while failed < self._count: # check more times if host is unreachable - if self.ping(): - return True - failed += 1 - - _LOGGER.debug("No response from %s failed=%d", self.ip_address, failed) - return False - - async def async_setup_scanner( hass: HomeAssistant, config: ConfigType, async_see: AsyncSeeCallback, discovery_info: DiscoveryInfoType | None = None, ) -> bool: - """Set up the Host objects and return the update function.""" + """Legacy init: import via config flow.""" + + for 
dev_name, dev_host in config[CONF_HOSTS].items(): + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_IMPORTED_BY: "device_tracker", + CONF_NAME: dev_name, + CONF_HOST: dev_host, + CONF_PING_COUNT: config[CONF_PING_COUNT], + }, + ) + ) + + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.6.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Ping", + }, + ) + + return True + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up a Ping config entry.""" data: PingDomainData = hass.data[DOMAIN] - privileged = data.privileged - ip_to_dev_id = {ip: dev_id for (dev_id, ip) in config[CONF_HOSTS].items()} - interval = config.get( - CONF_SCAN_INTERVAL, - timedelta(seconds=len(ip_to_dev_id) * config[CONF_PING_COUNT]) + SCAN_INTERVAL, - ) - _LOGGER.debug( - "Started ping tracker with interval=%s on hosts: %s", - interval, - ",".join(ip_to_dev_id.keys()), - ) + async_add_entities([PingDeviceTracker(entry, data.coordinators[entry.entry_id])]) - if privileged is None: - hosts = [ - HostSubProcess(ip, dev_id, hass, config, privileged) - for (dev_id, ip) in config[CONF_HOSTS].items() - ] - async def async_update(now: datetime) -> None: - """Update all the hosts on every interval time.""" - results = await gather_with_limited_concurrency( - CONCURRENT_PING_LIMIT, - *(hass.async_add_executor_job(host.update) for host in hosts), - ) - await asyncio.gather( - *( - async_see(dev_id=host.dev_id, source_type=SourceType.ROUTER) - for idx, host in enumerate(hosts) - if results[idx] - ) - ) +class PingDeviceTracker(CoordinatorEntity[PingUpdateCoordinator], ScannerEntity): + """Representation of a Ping device tracker.""" - else: + def 
__init__( + self, config_entry: ConfigEntry, coordinator: PingUpdateCoordinator + ) -> None: + """Initialize the Ping device tracker.""" + super().__init__(coordinator) - async def async_update(now: datetime) -> None: - """Update all the hosts on every interval time.""" - responses = await async_multiping( - list(ip_to_dev_id), - count=PING_ATTEMPTS_COUNT, - timeout=ICMP_TIMEOUT, - privileged=privileged, - ) - _LOGGER.debug("Multiping responses: %s", responses) - await asyncio.gather( - *( - async_see(dev_id=dev_id, source_type=SourceType.ROUTER) - for idx, dev_id in enumerate(ip_to_dev_id.values()) - if responses[idx].is_alive - ) - ) + self._attr_name = config_entry.title + self.config_entry = config_entry - async def _async_update_interval(now: datetime) -> None: - try: - await async_update(now) - finally: - if not hass.is_stopping: - async_track_point_in_utc_time( - hass, _async_update_interval, now + interval - ) + @property + def ip_address(self) -> str: + """Return the primary ip address of the device.""" + return self.coordinator.data.ip_address - await _async_update_interval(dt_util.now()) - return True + @property + def unique_id(self) -> str: + """Return a unique ID.""" + return self.config_entry.entry_id + + @property + def source_type(self) -> SourceType: + """Return the source type which is router.""" + return SourceType.ROUTER + + @property + def is_connected(self) -> bool: + """Return true if ping returns is_alive.""" + return self.coordinator.data.is_alive + + @property + def entity_registry_enabled_default(self) -> bool: + """Return if entity is enabled by default.""" + if CONF_IMPORTED_BY in self.config_entry.data: + return bool(self.config_entry.data[CONF_IMPORTED_BY] == "device_tracker") + return False diff --git a/homeassistant/components/ping/helpers.py b/homeassistant/components/ping/helpers.py index da58858a801..ce3d5c3b461 100644 --- a/homeassistant/components/ping/helpers.py +++ b/homeassistant/components/ping/helpers.py @@ -33,7 +33,7 @@ 
class PingData: def __init__(self, hass: HomeAssistant, host: str, count: int) -> None: """Initialize the data object.""" self.hass = hass - self._ip_address = host + self.ip_address = host self._count = count @@ -49,10 +49,10 @@ class PingDataICMPLib(PingData): async def async_update(self) -> None: """Retrieve the latest details from the host.""" - _LOGGER.debug("ping address: %s", self._ip_address) + _LOGGER.debug("ping address: %s", self.ip_address) try: data = await async_ping( - self._ip_address, + self.ip_address, count=self._count, timeout=ICMP_TIMEOUT, privileged=self._privileged, @@ -89,7 +89,7 @@ class PingDataSubProcess(PingData): "-c", str(self._count), "-W1", - self._ip_address, + self.ip_address, ] async def async_ping(self) -> dict[str, Any] | None: diff --git a/homeassistant/components/ping/manifest.json b/homeassistant/components/ping/manifest.json index e27c3a239d0..ded5a3fd3e6 100644 --- a/homeassistant/components/ping/manifest.json +++ b/homeassistant/components/ping/manifest.json @@ -2,6 +2,7 @@ "domain": "ping", "name": "Ping (ICMP)", "codeowners": ["@jpbede"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ping", "iot_class": "local_polling", "loggers": ["icmplib"], diff --git a/homeassistant/components/ping/services.yaml b/homeassistant/components/ping/services.yaml deleted file mode 100644 index c983a105c93..00000000000 --- a/homeassistant/components/ping/services.yaml +++ /dev/null @@ -1 +0,0 @@ -reload: diff --git a/homeassistant/components/ping/strings.json b/homeassistant/components/ping/strings.json index 5b5c5da46bc..12bc1d25c7a 100644 --- a/homeassistant/components/ping/strings.json +++ b/homeassistant/components/ping/strings.json @@ -1,8 +1,34 @@ { - "services": { - "reload": { - "name": "[%key:common::action::reload%]", - "description": "Reloads ping sensors from the YAML-configuration." 
+ "config": { + "step": { + "user": { + "title": "Add Ping", + "description": "Ping allows you to check the availability of a host.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "count": "Ping count" + }, + "data_description": { + "host": "The hostname or IP address of the device you want to ping." + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "invalid_ip_address": "Invalid IP address." + } + }, + "options": { + "step": { + "init": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "count": "[%key:component::ping::config::step::user::data::count%]" + } + } + }, + "abort": { + "invalid_ip_address": "[%key:component::ping::config::abort::invalid_ip_address%]" } } } diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index a33cef0e3a7..efad1b7466b 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -46,6 +46,8 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key = DOMAIN + _previous_mode: str = "heating" + def __init__( self, coordinator: PlugwiseDataUpdateCoordinator, @@ -55,10 +57,15 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): super().__init__(coordinator, device_id) self._attr_extra_state_attributes = {} self._attr_unique_id = f"{device_id}-climate" - + self.cdr_gateway = coordinator.data.gateway + gateway_id: str = coordinator.data.gateway["gateway_id"] + self.gateway_data = coordinator.data.devices[gateway_id] # Determine supported features self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - if self.coordinator.data.gateway["cooling_present"]: + if ( + self.cdr_gateway["cooling_present"] + and self.cdr_gateway["smile_name"] != "Adam" + ): self._attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) 
@@ -67,12 +74,26 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): self._attr_preset_modes = presets self._attr_min_temp = self.device["thermostat"]["lower_bound"] - self._attr_max_temp = self.device["thermostat"]["upper_bound"] + self._attr_max_temp = min(self.device["thermostat"]["upper_bound"], 35.0) # Ensure we don't drop below 0.1 self._attr_target_temperature_step = max( self.device["thermostat"]["resolution"], 0.1 ) + def _previous_action_mode(self, coordinator: PlugwiseDataUpdateCoordinator) -> None: + """Return the previous action-mode when the regulation-mode is not heating or cooling. + + Helper for set_hvac_mode(). + """ + # When no cooling available, _previous_mode is always heating + if ( + "regulation_modes" in self.gateway_data + and "cooling" in self.gateway_data["regulation_modes"] + ): + mode = self.gateway_data["select_regulation_mode"] + if mode in ("cooling", "heating"): + self._previous_mode = mode + @property def current_temperature(self) -> float: """Return the current temperature.""" @@ -105,33 +126,46 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): @property def hvac_mode(self) -> HVACMode: - """Return HVAC operation ie. auto, heat, or heat_cool mode.""" + """Return HVAC operation ie. 
auto, cool, heat, heat_cool, or off mode.""" if (mode := self.device.get("mode")) is None or mode not in self.hvac_modes: return HVACMode.HEAT return HVACMode(mode) @property def hvac_modes(self) -> list[HVACMode]: - """Return the list of available HVACModes.""" - hvac_modes = [HVACMode.HEAT] - if self.coordinator.data.gateway["cooling_present"]: - hvac_modes = [HVACMode.HEAT_COOL] + """Return a list of available HVACModes.""" + hvac_modes: list[HVACMode] = [] + if "regulation_modes" in self.gateway_data: + hvac_modes.append(HVACMode.OFF) if self.device["available_schedules"] != ["None"]: hvac_modes.append(HVACMode.AUTO) + if self.cdr_gateway["cooling_present"]: + if "regulation_modes" in self.gateway_data: + if self.gateway_data["select_regulation_mode"] == "cooling": + hvac_modes.append(HVACMode.COOL) + if self.gateway_data["select_regulation_mode"] == "heating": + hvac_modes.append(HVACMode.HEAT) + else: + hvac_modes.append(HVACMode.HEAT_COOL) + else: + hvac_modes.append(HVACMode.HEAT) + return hvac_modes @property - def hvac_action(self) -> HVACAction | None: + def hvac_action(self) -> HVACAction: """Return the current running hvac operation if supported.""" - heater: str | None = self.coordinator.data.gateway["heater_id"] - if heater: - heater_data = self.coordinator.data.devices[heater] - if heater_data["binary_sensors"]["heating_state"]: - return HVACAction.HEATING - if heater_data["binary_sensors"].get("cooling_state"): - return HVACAction.COOLING + # Keep track of the previous action-mode + self._previous_action_mode(self.coordinator) + + heater: str = self.coordinator.data.gateway["heater_id"] + heater_data = self.coordinator.data.devices[heater] + if heater_data["binary_sensors"]["heating_state"]: + return HVACAction.HEATING + if heater_data["binary_sensors"].get("cooling_state", False): + return HVACAction.COOLING return HVACAction.IDLE @@ -168,9 +202,18 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): if hvac_mode not in self.hvac_modes: 
raise HomeAssistantError("Unsupported hvac_mode") - await self.coordinator.api.set_schedule_state( - self.device["location"], "on" if hvac_mode == HVACMode.AUTO else "off" - ) + if hvac_mode == self.hvac_mode: + return + + if hvac_mode == HVACMode.OFF: + await self.coordinator.api.set_regulation_mode(hvac_mode) + else: + await self.coordinator.api.set_schedule_state( + self.device["location"], + "on" if hvac_mode == HVACMode.AUTO else "off", + ) + if self.hvac_mode == HVACMode.OFF: + await self.coordinator.api.set_regulation_mode(self._previous_mode) @plugwise_command async def async_set_preset_mode(self, preset_mode: str) -> None: diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 1155aaffdf8..1373ba40fa3 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["crcmod", "plugwise"], - "requirements": ["plugwise==0.33.2"], + "requirements": ["plugwise==0.34.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/number.py b/homeassistant/components/plugwise/number.py index 9865aec2242..2c87edddf04 100644 --- a/homeassistant/components/plugwise/number.py +++ b/homeassistant/components/plugwise/number.py @@ -23,19 +23,11 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity -@dataclass -class PlugwiseEntityDescriptionMixin: - """Mixin values for Plugwise entities.""" - - command: Callable[[Smile, str, str, float], Awaitable[None]] - - -@dataclass -class PlugwiseNumberEntityDescription( - NumberEntityDescription, PlugwiseEntityDescriptionMixin -): +@dataclass(kw_only=True) +class PlugwiseNumberEntityDescription(NumberEntityDescription): """Class describing Plugwise Number entities.""" + command: Callable[[Smile, str, str, float], Awaitable[None]] key: NumberType diff --git 
a/homeassistant/components/plugwise/select.py b/homeassistant/components/plugwise/select.py index 138e5fe3b59..c12ca671554 100644 --- a/homeassistant/components/plugwise/select.py +++ b/homeassistant/components/plugwise/select.py @@ -18,21 +18,13 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity -@dataclass -class PlugwiseSelectDescriptionMixin: - """Mixin values for Plugwise Select entities.""" - - command: Callable[[Smile, str, str], Awaitable[None]] - options_key: SelectOptionsType - - -@dataclass -class PlugwiseSelectEntityDescription( - SelectEntityDescription, PlugwiseSelectDescriptionMixin -): +@dataclass(kw_only=True) +class PlugwiseSelectEntityDescription(SelectEntityDescription): """Class describing Plugwise Select entities.""" + command: Callable[[Smile, str, str], Awaitable[None]] key: SelectType + options_key: SelectOptionsType SELECT_TYPES = ( diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json index 663461ceaa1..d894b18f545 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.14.0"] + "requirements": ["bluetooth-data-tools==1.16.0"] } diff --git a/homeassistant/components/private_ble_device/sensor.py b/homeassistant/components/private_ble_device/sensor.py index b332d057ba9..d15ed1163b7 100644 --- a/homeassistant/components/private_ble_device/sensor.py +++ b/homeassistant/components/private_ble_device/sensor.py @@ -83,13 +83,17 @@ SENSOR_DESCRIPTIONS = ( native_unit_of_measurement=UnitOfTime.SECONDS, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda hass, service_info: 
bluetooth.async_get_learned_advertising_interval( - hass, service_info.address - ) - or bluetooth.async_get_fallback_availability_interval( - hass, service_info.address - ) - or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS, + value_fn=( + lambda hass, service_info: ( + bluetooth.async_get_learned_advertising_interval( + hass, service_info.address + ) + or bluetooth.async_get_fallback_availability_interval( + hass, service_info.address + ) + or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS + ) + ), suggested_display_precision=1, ), ) diff --git a/homeassistant/components/progettihwsw/strings.json b/homeassistant/components/progettihwsw/strings.json index bb98d565594..d50c6f8d4e3 100644 --- a/homeassistant/components/progettihwsw/strings.json +++ b/homeassistant/components/progettihwsw/strings.json @@ -13,6 +13,9 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your ProgettiHWSW board." 
} }, "relay_modes": { diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index c96ed2e4ed3..7beac4cc54b 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -19,6 +19,7 @@ from homeassistant.components.climate import ( from homeassistant.components.cover import ATTR_POSITION, ATTR_TILT_POSITION from homeassistant.components.http import HomeAssistantView from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES, ATTR_HUMIDITY +from homeassistant.components.light import ATTR_BRIGHTNESS from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_BATTERY_LEVEL, @@ -323,14 +324,14 @@ class PrometheusMetrics: } def _battery(self, state): - if "battery_level" in state.attributes: + if (battery_level := state.attributes.get(ATTR_BATTERY_LEVEL)) is not None: metric = self._metric( "battery_level_percent", self.prometheus_cli.Gauge, "Battery level as a percentage of its capacity", ) try: - value = float(state.attributes[ATTR_BATTERY_LEVEL]) + value = float(battery_level) metric.labels(**self._labels(state)).set(value) except ValueError: pass @@ -353,18 +354,18 @@ class PrometheusMetrics: value = self.state_as_number(state) metric.labels(**self._labels(state)).set(value) - def _handle_input_number(self, state): + def _numeric_handler(self, state, domain, title): if unit := self._unit_string(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)): metric = self._metric( - f"input_number_state_{unit}", + f"{domain}_state_{unit}", self.prometheus_cli.Gauge, - f"State of the input number measured in {unit}", + f"State of the {title} measured in {unit}", ) else: metric = self._metric( - "input_number_state", + f"{domain}_state", self.prometheus_cli.Gauge, - "State of the input number", + f"State of the {title}", ) with suppress(ValueError): @@ -378,6 +379,12 @@ class PrometheusMetrics: ) 
metric.labels(**self._labels(state)).set(value) + def _handle_input_number(self, state): + self._numeric_handler(state, "input_number", "input number") + + def _handle_number(self, state): + self._numeric_handler(state, "number", "number") + def _handle_device_tracker(self, state): metric = self._metric( "device_tracker_state", @@ -434,8 +441,9 @@ class PrometheusMetrics: ) try: - if "brightness" in state.attributes and state.state == STATE_ON: - value = state.attributes["brightness"] / 255.0 + brightness = state.attributes.get(ATTR_BRIGHTNESS) + if state.state == STATE_ON and brightness is not None: + value = brightness / 255.0 else: value = self.state_as_number(state) value = value * 100 diff --git a/homeassistant/components/proximity/manifest.json b/homeassistant/components/proximity/manifest.json index c09a03b2438..3f1ea950d0e 100644 --- a/homeassistant/components/proximity/manifest.json +++ b/homeassistant/components/proximity/manifest.json @@ -1,7 +1,7 @@ { "domain": "proximity", "name": "Proximity", - "codeowners": [], + "codeowners": ["@mib1185"], "dependencies": ["device_tracker", "zone"], "documentation": "https://www.home-assistant.io/integrations/proximity", "iot_class": "calculated", diff --git a/homeassistant/components/pure_energie/manifest.json b/homeassistant/components/pure_energie/manifest.json index 4c83b5e3651..19098c41208 100644 --- a/homeassistant/components/pure_energie/manifest.json +++ b/homeassistant/components/pure_energie/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/pure_energie", "iot_class": "local_polling", "quality_scale": "platinum", - "requirements": ["gridnet==4.2.0"], + "requirements": ["gridnet==5.0.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/pushbullet/api.py b/homeassistant/components/pushbullet/api.py index ff6a57aa931..691ef7413c3 100644 --- a/homeassistant/components/pushbullet/api.py +++ b/homeassistant/components/pushbullet/api.py 
@@ -1,4 +1,5 @@ """Pushbullet Notification provider.""" +from __future__ import annotations from typing import Any @@ -10,7 +11,7 @@ from homeassistant.helpers.dispatcher import dispatcher_send from .const import DATA_UPDATED -class PushBulletNotificationProvider(Listener): +class PushBulletNotificationProvider(Listener): # type: ignore[misc] """Provider for an account, leading to one or more sensors.""" def __init__(self, hass: HomeAssistant, pushbullet: PushBullet) -> None: diff --git a/homeassistant/components/pushbullet/notify.py b/homeassistant/components/pushbullet/notify.py index 1cc851bdb99..662240d0bf5 100644 --- a/homeassistant/components/pushbullet/notify.py +++ b/homeassistant/components/pushbullet/notify.py @@ -21,6 +21,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from .api import PushBulletNotificationProvider from .const import ATTR_FILE, ATTR_FILE_URL, ATTR_URL, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -34,8 +35,10 @@ async def async_get_service( """Get the Pushbullet notification service.""" if TYPE_CHECKING: assert discovery_info is not None - pushbullet: PushBullet = hass.data[DOMAIN][discovery_info["entry_id"]].pushbullet - return PushBulletNotificationService(hass, pushbullet) + pb_provider: PushBulletNotificationProvider = hass.data[DOMAIN][ + discovery_info["entry_id"] + ] + return PushBulletNotificationService(hass, pb_provider.pushbullet) class PushBulletNotificationService(BaseNotificationService): @@ -120,7 +123,7 @@ class PushBulletNotificationService(BaseNotificationService): pusher: PushBullet, email: str | None = None, phonenumber: str | None = None, - ): + ) -> None: """Create the message content.""" kwargs = {"body": message, "title": title} if email: diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index 9e66d79d2bd..61bd6fd6164 
100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["pvo==2.1.0"] + "requirements": ["pvo==2.1.1"] } diff --git a/homeassistant/components/pvoutput/sensor.py b/homeassistant/components/pvoutput/sensor.py index bcf869d3bba..d9ef71bee69 100644 --- a/homeassistant/components/pvoutput/sensor.py +++ b/homeassistant/components/pvoutput/sensor.py @@ -28,20 +28,13 @@ from .const import CONF_SYSTEM_ID, DOMAIN from .coordinator import PVOutputDataUpdateCoordinator -@dataclass -class PVOutputSensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class PVOutputSensorEntityDescription(SensorEntityDescription): + """Describes a PVOutput sensor entity.""" value_fn: Callable[[Status], int | float | None] -@dataclass -class PVOutputSensorEntityDescription( - SensorEntityDescription, PVOutputSensorEntityDescriptionMixin -): - """Describes a PVOutput sensor entity.""" - - SENSORS: tuple[PVOutputSensorEntityDescription, ...] 
= ( PVOutputSensorEntityDescription( key="energy_consumption", diff --git a/homeassistant/components/pvpc_hourly_pricing/__init__.py b/homeassistant/components/pvpc_hourly_pricing/__init__.py index 808ff1b4cc4..7071000ffd9 100644 --- a/homeassistant/components/pvpc_hourly_pricing/__init__.py +++ b/homeassistant/components/pvpc_hourly_pricing/__init__.py @@ -2,38 +2,21 @@ from datetime import timedelta import logging -from aiopvpc import DEFAULT_POWER_KW, TARIFFS, EsiosApiData, PVPCData -import voluptuous as vol +from aiopvpc import BadApiTokenAuthError, EsiosApiData, PVPCData from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_API_TOKEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util -from .const import ( - ATTR_POWER, - ATTR_POWER_P3, - ATTR_TARIFF, - DEFAULT_NAME, - DOMAIN, - PLATFORMS, -) +from .const import ATTR_POWER, ATTR_POWER_P3, ATTR_TARIFF, DOMAIN _LOGGER = logging.getLogger(__name__) -_DEFAULT_TARIFF = TARIFFS[0] -VALID_POWER = vol.All(vol.Coerce(float), vol.Range(min=1.0, max=15.0)) -VALID_TARIFF = vol.In(TARIFFS) -UI_CONFIG_SCHEMA = vol.Schema( - { - vol.Required(CONF_NAME, default=DEFAULT_NAME): str, - vol.Required(ATTR_TARIFF, default=_DEFAULT_TARIFF): VALID_TARIFF, - vol.Required(ATTR_POWER, default=DEFAULT_POWER_KW): VALID_POWER, - vol.Required(ATTR_POWER_P3, default=DEFAULT_POWER_KW): VALID_POWER, - } -) +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) @@ -52,7 +35,7 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" if any( 
entry.data.get(attrib) != entry.options.get(attrib) - for attrib in (ATTR_POWER, ATTR_POWER_P3) + for attrib in (ATTR_POWER, ATTR_POWER_P3, CONF_API_TOKEN) ): # update entry replacing data with new options hass.config_entries.async_update_entry( @@ -80,6 +63,7 @@ class ElecPricesDataUpdateCoordinator(DataUpdateCoordinator[EsiosApiData]): local_timezone=hass.config.time_zone, power=entry.data[ATTR_POWER], power_valley=entry.data[ATTR_POWER_P3], + api_token=entry.data.get(CONF_API_TOKEN), ) super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(minutes=30) @@ -93,7 +77,10 @@ class ElecPricesDataUpdateCoordinator(DataUpdateCoordinator[EsiosApiData]): async def _async_update_data(self) -> EsiosApiData: """Update electricity prices from the ESIOS API.""" - api_data = await self.api.async_update_all(self.data, dt_util.utcnow()) + try: + api_data = await self.api.async_update_all(self.data, dt_util.utcnow()) + except BadApiTokenAuthError as exc: + raise ConfigEntryAuthFailed from exc if ( not api_data or not api_data.sensors diff --git a/homeassistant/components/pvpc_hourly_pricing/config_flow.py b/homeassistant/components/pvpc_hourly_pricing/config_flow.py index 9412aa2e97d..66092cb9211 100644 --- a/homeassistant/components/pvpc_hourly_pricing/config_flow.py +++ b/homeassistant/components/pvpc_hourly_pricing/config_flow.py @@ -1,22 +1,49 @@ """Config flow for pvpc_hourly_pricing.""" from __future__ import annotations +from collections.abc import Mapping from typing import Any +from aiopvpc import DEFAULT_POWER_KW, PVPCData import voluptuous as vol from homeassistant import config_entries +from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import callback from homeassistant.data_entry_flow import FlowResult +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.util import dt as dt_util -from . 
import CONF_NAME, UI_CONFIG_SCHEMA, VALID_POWER -from .const import ATTR_POWER, ATTR_POWER_P3, ATTR_TARIFF, DOMAIN +from .const import ( + ATTR_POWER, + ATTR_POWER_P3, + ATTR_TARIFF, + CONF_USE_API_TOKEN, + DEFAULT_NAME, + DEFAULT_TARIFF, + DOMAIN, + VALID_POWER, + VALID_TARIFF, +) + +_MAIL_TO_LINK = ( + "[consultasios@ree.es]" + "(mailto:consultasios@ree.es?subject=Personal%20token%20request)" +) class TariffSelectorConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle config flow for `pvpc_hourly_pricing`.""" VERSION = 1 + _name: str | None = None + _tariff: str | None = None + _power: float | None = None + _power_p3: float | None = None + _use_api_token: bool = False + _api_token: str | None = None + _api: PVPCData | None = None + _reauth_entry: config_entries.ConfigEntry | None = None @staticmethod @callback @@ -33,36 +60,184 @@ class TariffSelectorConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): if user_input is not None: await self.async_set_unique_id(user_input[ATTR_TARIFF]) self._abort_if_unique_id_configured() - return self.async_create_entry(title=user_input[CONF_NAME], data=user_input) + if not user_input[CONF_USE_API_TOKEN]: + return self.async_create_entry( + title=user_input[CONF_NAME], + data={ + CONF_NAME: user_input[CONF_NAME], + ATTR_TARIFF: user_input[ATTR_TARIFF], + ATTR_POWER: user_input[ATTR_POWER], + ATTR_POWER_P3: user_input[ATTR_POWER_P3], + CONF_API_TOKEN: None, + }, + ) - return self.async_show_form(step_id="user", data_schema=UI_CONFIG_SCHEMA) + self._name = user_input[CONF_NAME] + self._tariff = user_input[ATTR_TARIFF] + self._power = user_input[ATTR_POWER] + self._power_p3 = user_input[ATTR_POWER_P3] + self._use_api_token = user_input[CONF_USE_API_TOKEN] + return await self.async_step_api_token() + + data_schema = vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): str, + vol.Required(ATTR_TARIFF, default=DEFAULT_TARIFF): VALID_TARIFF, + vol.Required(ATTR_POWER, default=DEFAULT_POWER_KW): VALID_POWER, + 
vol.Required(ATTR_POWER_P3, default=DEFAULT_POWER_KW): VALID_POWER, + vol.Required(CONF_USE_API_TOKEN, default=False): bool, + } + ) + return self.async_show_form(step_id="user", data_schema=data_schema) + + async def async_step_api_token( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle optional step to define API token for extra sensors.""" + if user_input is not None: + self._api_token = user_input[CONF_API_TOKEN] + return await self._async_verify( + "api_token", + data_schema=vol.Schema( + {vol.Required(CONF_API_TOKEN, default=self._api_token): str} + ), + ) + return self.async_show_form( + step_id="api_token", + data_schema=vol.Schema( + {vol.Required(CONF_API_TOKEN, default=self._api_token): str} + ), + description_placeholders={"mail_to_link": _MAIL_TO_LINK}, + ) + + async def _async_verify(self, step_id: str, data_schema: vol.Schema) -> FlowResult: + """Attempt to verify the provided configuration.""" + errors: dict[str, str] = {} + auth_ok = True + if self._use_api_token: + if not self._api: + self._api = PVPCData(session=async_get_clientsession(self.hass)) + auth_ok = await self._api.check_api_token(dt_util.utcnow(), self._api_token) + if not auth_ok: + errors["base"] = "invalid_auth" + return self.async_show_form( + step_id=step_id, + data_schema=data_schema, + errors=errors, + description_placeholders={"mail_to_link": _MAIL_TO_LINK}, + ) + + data = { + CONF_NAME: self._name, + ATTR_TARIFF: self._tariff, + ATTR_POWER: self._power, + ATTR_POWER_P3: self._power_p3, + CONF_API_TOKEN: self._api_token if self._use_api_token else None, + } + if self._reauth_entry: + self.hass.config_entries.async_update_entry(self._reauth_entry, data=data) + self.hass.async_create_task( + self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + ) + return self.async_abort(reason="reauth_successful") + + assert self._name is not None + return self.async_create_entry(title=self._name, data=data) + + async def async_step_reauth(self, 
entry_data: Mapping[str, Any]) -> FlowResult: + """Handle re-authentication with ESIOS Token.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + self._api_token = entry_data.get(CONF_API_TOKEN) + self._use_api_token = self._api_token is not None + self._name = entry_data[CONF_NAME] + self._tariff = entry_data[ATTR_TARIFF] + self._power = entry_data[ATTR_POWER] + self._power_p3 = entry_data[ATTR_POWER_P3] + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Confirm reauth dialog.""" + data_schema = vol.Schema( + { + vol.Required(CONF_USE_API_TOKEN, default=self._use_api_token): bool, + vol.Optional(CONF_API_TOKEN, default=self._api_token): str, + } + ) + if user_input: + self._api_token = user_input[CONF_API_TOKEN] + self._use_api_token = user_input[CONF_USE_API_TOKEN] + return await self._async_verify("reauth_confirm", data_schema) + return self.async_show_form(step_id="reauth_confirm", data_schema=data_schema) -class PVPCOptionsFlowHandler(config_entries.OptionsFlow): +class PVPCOptionsFlowHandler(config_entries.OptionsFlowWithConfigEntry): """Handle PVPC options.""" - def __init__(self, config_entry: config_entries.ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry + _power: float | None = None + _power_p3: float | None = None + + async def async_step_api_token( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle optional step to define API token for extra sensors.""" + if user_input is not None and user_input.get(CONF_API_TOKEN): + return self.async_create_entry( + title="", + data={ + ATTR_POWER: self._power, + ATTR_POWER_P3: self._power_p3, + CONF_API_TOKEN: user_input[CONF_API_TOKEN], + }, + ) + + # Fill options with entry data + api_token = self.options.get( + CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) + ) + return 
self.async_show_form( + step_id="api_token", + data_schema=vol.Schema( + {vol.Required(CONF_API_TOKEN, default=api_token): str} + ), + description_placeholders={"mail_to_link": _MAIL_TO_LINK}, + ) async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Manage the options.""" if user_input is not None: - return self.async_create_entry(title="", data=user_input) + if user_input[CONF_USE_API_TOKEN]: + self._power = user_input[ATTR_POWER] + self._power_p3 = user_input[ATTR_POWER_P3] + return await self.async_step_api_token(user_input) + return self.async_create_entry( + title="", + data={ + ATTR_POWER: user_input[ATTR_POWER], + ATTR_POWER_P3: user_input[ATTR_POWER_P3], + CONF_API_TOKEN: None, + }, + ) # Fill options with entry data - power = self.config_entry.options.get( - ATTR_POWER, self.config_entry.data[ATTR_POWER] - ) - power_valley = self.config_entry.options.get( + power = self.options.get(ATTR_POWER, self.config_entry.data[ATTR_POWER]) + power_valley = self.options.get( ATTR_POWER_P3, self.config_entry.data[ATTR_POWER_P3] ) + api_token = self.options.get( + CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) + ) + use_api_token = api_token is not None schema = vol.Schema( { vol.Required(ATTR_POWER, default=power): VALID_POWER, vol.Required(ATTR_POWER_P3, default=power_valley): VALID_POWER, + vol.Required(CONF_USE_API_TOKEN, default=use_api_token): bool, } ) return self.async_show_form(step_id="init", data_schema=schema) diff --git a/homeassistant/components/pvpc_hourly_pricing/const.py b/homeassistant/components/pvpc_hourly_pricing/const.py index 186ee1171f3..ea4d97620ec 100644 --- a/homeassistant/components/pvpc_hourly_pricing/const.py +++ b/homeassistant/components/pvpc_hourly_pricing/const.py @@ -1,9 +1,15 @@ """Constant values for pvpc_hourly_pricing.""" -from homeassistant.const import Platform +from aiopvpc import TARIFFS +import voluptuous as vol DOMAIN = "pvpc_hourly_pricing" -PLATFORMS = [Platform.SENSOR] + 
ATTR_POWER = "power" ATTR_POWER_P3 = "power_p3" ATTR_TARIFF = "tariff" DEFAULT_NAME = "PVPC" +CONF_USE_API_TOKEN = "use_api_token" + +VALID_POWER = vol.All(vol.Coerce(float), vol.Range(min=1.0, max=15.0)) +VALID_TARIFF = vol.In(TARIFFS) +DEFAULT_TARIFF = TARIFFS[0] diff --git a/homeassistant/components/pvpc_hourly_pricing/strings.json b/homeassistant/components/pvpc_hourly_pricing/strings.json index 1a0055ddbac..4236709fdfa 100644 --- a/homeassistant/components/pvpc_hourly_pricing/strings.json +++ b/homeassistant/components/pvpc_hourly_pricing/strings.json @@ -6,12 +6,31 @@ "name": "Sensor Name", "tariff": "Applicable tariff by geographic zone", "power": "Contracted power (kW)", - "power_p3": "Contracted power for valley period P3 (kW)" + "power_p3": "Contracted power for valley period P3 (kW)", + "use_api_token": "Enable ESIOS Personal API token for private access" + } + }, + "api_token": { + "title": "ESIOS API token", + "description": "To use the extended API you must request a personal token by mailing to {mail_to_link}.", + "data": { + "api_token": "[%key:common::config_flow::data::api_token%]" + } + }, + "reauth_confirm": { + "data": { + "description": "Re-authenticate with a valid token or disable it", + "use_api_token": "[%key:component::pvpc_hourly_pricing::config::step::user::data::use_api_token%]", + "api_token": "[%key:common::config_flow::data::api_token%]" } } }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "options": { @@ -19,7 +38,15 @@ "init": { "data": { "power": "[%key:component::pvpc_hourly_pricing::config::step::user::data::power%]", - "power_p3": "[%key:component::pvpc_hourly_pricing::config::step::user::data::power_p3%]" + 
"power_p3": "[%key:component::pvpc_hourly_pricing::config::step::user::data::power_p3%]", + "use_api_token": "[%key:component::pvpc_hourly_pricing::config::step::user::data::use_api_token%]" + } + }, + "api_token": { + "title": "[%key:component::pvpc_hourly_pricing::config::step::api_token::title%]", + "description": "[%key:component::pvpc_hourly_pricing::config::step::api_token::description%]", + "data": { + "api_token": "[%key:common::config_flow::data::api_token%]" } } } diff --git a/homeassistant/components/python_script/manifest.json b/homeassistant/components/python_script/manifest.json index bd034053a34..dcc0e38c737 100644 --- a/homeassistant/components/python_script/manifest.json +++ b/homeassistant/components/python_script/manifest.json @@ -5,8 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/python_script", "loggers": ["RestrictedPython"], "quality_scale": "internal", - "requirements": [ - "RestrictedPython==6.2;python_version<'3.12'", - "RestrictedPython==7.0a1.dev0;python_version>='3.12'" - ] + "requirements": ["RestrictedPython==7.0"] } diff --git a/homeassistant/components/qnap/strings.json b/homeassistant/components/qnap/strings.json index a5fa3c8a897..d535b9f0e87 100644 --- a/homeassistant/components/qnap/strings.json +++ b/homeassistant/components/qnap/strings.json @@ -11,6 +11,9 @@ "port": "[%key:common::config_flow::data::port%]", "ssl": "[%key:common::config_flow::data::ssl%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your QNAP device." 
} } }, diff --git a/homeassistant/components/rainbird/__init__.py b/homeassistant/components/rainbird/__init__.py index e7a7c1200b9..e5731dc08fe 100644 --- a/homeassistant/components/rainbird/__init__.py +++ b/homeassistant/components/rainbird/__init__.py @@ -10,10 +10,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac -from homeassistant.helpers.entity_registry import async_entries_for_config_entry from .const import CONF_SERIAL_NUMBER from .coordinator import RainbirdData @@ -55,6 +54,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: format_mac(mac_address), str(entry.data[CONF_SERIAL_NUMBER]), ) + _async_fix_device_id( + hass, + dr.async_get(hass), + entry.entry_id, + format_mac(mac_address), + str(entry.data[CONF_SERIAL_NUMBER]), + ) try: model_info = await controller.get_model_and_version() @@ -124,7 +130,7 @@ def _async_fix_entity_unique_id( serial_number: str, ) -> None: """Migrate existing entity if current one can't be found and an old one exists.""" - entity_entries = async_entries_for_config_entry(entity_registry, config_entry_id) + entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id) for entity_entry in entity_entries: unique_id = str(entity_entry.unique_id) if unique_id.startswith(mac_address): @@ -137,6 +143,70 @@ def _async_fix_entity_unique_id( ) +def _async_device_entry_to_keep( + old_entry: dr.DeviceEntry, new_entry: dr.DeviceEntry +) -> dr.DeviceEntry: + """Determine which device entry to keep when there are duplicates. 
+ + As we transitioned to new unique ids, we did not update existing device entries + and as a result there are devices with both the old and new unique id format. We + have to pick which one to keep, and preferably this can repair things if the + user previously renamed devices. + """ + # Prefer the new device if the user already gave it a name or area. Otherwise, + # do the same for the old entry. If no entries have been modified then keep the new one. + if new_entry.disabled_by is None and ( + new_entry.area_id is not None or new_entry.name_by_user is not None + ): + return new_entry + if old_entry.disabled_by is None and ( + old_entry.area_id is not None or old_entry.name_by_user is not None + ): + return old_entry + return new_entry if new_entry.disabled_by is None else old_entry + + +def _async_fix_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_id: str, + mac_address: str, + serial_number: str, +) -> None: + """Migrate existing device identifiers to the new format. + + This will rename any device ids that are prefixed with the serial number to be prefixed + with the mac address. This also cleans up from a bug that allowed devices to exist + in both the old and new format. 
+ """ + device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id) + device_entry_map = {} + migrations = {} + for device_entry in device_entries: + unique_id = str(next(iter(device_entry.identifiers))[1]) + device_entry_map[unique_id] = device_entry + if (suffix := unique_id.removeprefix(str(serial_number))) != unique_id: + migrations[unique_id] = f"{mac_address}{suffix}" + + for unique_id, new_unique_id in migrations.items(): + old_entry = device_entry_map[unique_id] + if (new_entry := device_entry_map.get(new_unique_id)) is not None: + # Device entries exist for both the old and new format and one must be removed + entry_to_keep = _async_device_entry_to_keep(old_entry, new_entry) + if entry_to_keep == new_entry: + _LOGGER.debug("Removing device entry %s", unique_id) + device_registry.async_remove_device(old_entry.id) + continue + # Remove new entry and update old entry to new id below + _LOGGER.debug("Removing device entry %s", new_unique_id) + device_registry.async_remove_device(new_entry.id) + + _LOGGER.debug("Updating device id from %s to %s", unique_id, new_unique_id) + device_registry.async_update_device( + old_entry.id, new_identifiers={(DOMAIN, new_unique_id)} + ) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" diff --git a/homeassistant/components/rainbird/manifest.json b/homeassistant/components/rainbird/manifest.json index 07a0bc0a5f6..b8cb86264f2 100644 --- a/homeassistant/components/rainbird/manifest.json +++ b/homeassistant/components/rainbird/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rainbird", "iot_class": "local_polling", "loggers": ["pyrainbird"], - "requirements": ["pyrainbird==4.0.0"] + "requirements": ["pyrainbird==4.0.1"] } diff --git a/homeassistant/components/rainmachine/__init__.py b/homeassistant/components/rainmachine/__init__.py index c29154a941c..fde9b945e53 100644 --- 
a/homeassistant/components/rainmachine/__init__.py +++ b/homeassistant/components/rainmachine/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations import asyncio -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import timedelta from functools import partial, wraps @@ -326,10 +326,17 @@ async def async_setup_entry( # noqa: C901 entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - def call_with_controller(update_programs_and_zones: bool = True) -> Callable: + def call_with_controller( + update_programs_and_zones: bool = True, + ) -> Callable[ + [Callable[[ServiceCall, Controller], Coroutine[Any, Any, None]]], + Callable[[ServiceCall], Coroutine[Any, Any, None]], + ]: """Hydrate a service call with the appropriate controller.""" - def decorator(func: Callable) -> Callable[..., Awaitable]: + def decorator( + func: Callable[[ServiceCall, Controller], Coroutine[Any, Any, None]] + ) -> Callable[[ServiceCall], Coroutine[Any, Any, None]]: """Define the decorator.""" @wraps(func) diff --git a/homeassistant/components/rdw/binary_sensor.py b/homeassistant/components/rdw/binary_sensor.py index 16a93485b36..96311266db4 100644 --- a/homeassistant/components/rdw/binary_sensor.py +++ b/homeassistant/components/rdw/binary_sensor.py @@ -23,20 +23,13 @@ from homeassistant.helpers.update_coordinator import ( from .const import DOMAIN -@dataclass -class RDWBinarySensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class RDWBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes RDW binary sensor entity.""" is_on_fn: Callable[[Vehicle], bool | None] -@dataclass -class RDWBinarySensorEntityDescription( - BinarySensorEntityDescription, RDWBinarySensorEntityDescriptionMixin -): - """Describes RDW binary sensor entity.""" - - BINARY_SENSORS: tuple[RDWBinarySensorEntityDescription, ...] 
= ( RDWBinarySensorEntityDescription( key="liability_insured", diff --git a/homeassistant/components/rdw/manifest.json b/homeassistant/components/rdw/manifest.json index e63478976e3..f44dc7e0f12 100644 --- a/homeassistant/components/rdw/manifest.json +++ b/homeassistant/components/rdw/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["vehicle==2.2.0"] + "requirements": ["vehicle==2.2.1"] } diff --git a/homeassistant/components/rdw/sensor.py b/homeassistant/components/rdw/sensor.py index f330ac16b8e..d25c23c09bd 100644 --- a/homeassistant/components/rdw/sensor.py +++ b/homeassistant/components/rdw/sensor.py @@ -24,20 +24,13 @@ from homeassistant.helpers.update_coordinator import ( from .const import CONF_LICENSE_PLATE, DOMAIN -@dataclass -class RDWSensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class RDWSensorEntityDescription(SensorEntityDescription): + """Describes RDW sensor entity.""" value_fn: Callable[[Vehicle], date | str | float | None] -@dataclass -class RDWSensorEntityDescription( - SensorEntityDescription, RDWSensorEntityDescriptionMixin -): - """Describes RDW sensor entity.""" - - SENSORS: tuple[RDWSensorEntityDescription, ...] 
= ( RDWSensorEntityDescription( key="apk_expiration", diff --git a/homeassistant/components/recorder/auto_repairs/schema.py b/homeassistant/components/recorder/auto_repairs/schema.py index aa036f33999..aedf917dd22 100644 --- a/homeassistant/components/recorder/auto_repairs/schema.py +++ b/homeassistant/components/recorder/auto_repairs/schema.py @@ -101,9 +101,8 @@ def _validate_table_schema_has_correct_collation( collate = ( dialect_kwargs.get("mysql_collate") - or dialect_kwargs.get( - "mariadb_collate" - ) # pylint: disable-next=protected-access + or dialect_kwargs.get("mariadb_collate") + # pylint: disable-next=protected-access or connection.dialect._fetch_setting(connection, "collation_server") # type: ignore[attr-defined] ) if collate and collate != "utf8mb4_unicode_ci": diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index 06c8cf68903..b864e104ae6 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -176,13 +176,17 @@ class NativeLargeBinary(LargeBinary): # For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32 # for sqlite and postgresql we use a bigint UINT_32_TYPE = BigInteger().with_variant( - mysql.INTEGER(unsigned=True), "mysql", "mariadb" # type: ignore[no-untyped-call] + mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call] + "mysql", + "mariadb", ) JSON_VARIANT_CAST = Text().with_variant( - postgresql.JSON(none_as_null=True), "postgresql" # type: ignore[no-untyped-call] + postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", ) JSONB_VARIANT_CAST = Text().with_variant( - postgresql.JSONB(none_as_null=True), "postgresql" # type: ignore[no-untyped-call] + postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", ) DATETIME_TYPE = ( DateTime(timezone=True) diff --git a/homeassistant/components/recorder/filters.py 
b/homeassistant/components/recorder/filters.py index bf76c7264d5..fda8716df27 100644 --- a/homeassistant/components/recorder/filters.py +++ b/homeassistant/components/recorder/filters.py @@ -244,7 +244,8 @@ class Filters: ), # Needs https://github.com/bdraco/home-assistant/commit/bba91945006a46f3a01870008eb048e4f9cbb1ef self._generate_filter_for_columns( - (ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder # type: ignore[arg-type] + (ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), # type: ignore[arg-type] + _encoder, ).self_group(), ) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 68c357c0ed4..da58822e266 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -527,31 +527,37 @@ def _get_start_time_state_for_entities_stmt( ) -> Select: """Baked query to get states for specific entities.""" # We got an include-list of entities, accelerate the query by filtering already - # in the inner query. - stmt = _stmt_and_join_attributes_for_start_state( - no_attributes, include_last_changed - ).join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), + # in the inner and the outer query. 
+ stmt = ( + _stmt_and_join_attributes_for_start_state(no_attributes, include_last_changed) + .join( + ( + most_recent_states_for_entities_by_date := ( + select( + States.metadata_id.label("max_metadata_id"), + func.max(States.last_updated_ts).label("max_last_updated"), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) + .group_by(States.metadata_id) + .subquery() ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - ) - .filter(States.metadata_id.in_(metadata_ids)) - .group_by(States.metadata_id) - .subquery() - ) - ), - and_( - States.metadata_id - == most_recent_states_for_entities_by_date.c.max_metadata_id, - States.last_updated_ts - == most_recent_states_for_entities_by_date.c.max_last_updated, - ), + ), + and_( + States.metadata_id + == most_recent_states_for_entities_by_date.c.max_metadata_id, + States.last_updated_ts + == most_recent_states_for_entities_by_date.c.max_last_updated, + ), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) ) if no_attributes: return stmt diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 8808ed2fd2b..427e3acab2d 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -532,7 +532,9 @@ def _update_states_table_with_foreign_key_options( states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints old_states_table = Table( # noqa: F841 - TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type] + TABLE_STATES, + MetaData(), + *(alter["old_fk"] for alter in alters), # type: ignore[arg-type] ) for alter in alters: diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 
8bc6584c5a1..8dd539f84f3 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -41,7 +41,7 @@ from .queries import ( find_statistics_runs_to_purge, ) from .repack import repack_database -from .util import chunked, retryable_database_job, session_scope +from .util import chunked_or_all, retryable_database_job, session_scope if TYPE_CHECKING: from . import Recorder @@ -283,12 +283,16 @@ def _select_event_data_ids_to_purge( def _select_unused_attributes_ids( - session: Session, attributes_ids: set[int], database_engine: DatabaseEngine + instance: Recorder, + session: Session, + attributes_ids: set[int], + database_engine: DatabaseEngine, ) -> set[int]: """Return a set of attributes ids that are not used by any states in the db.""" if not attributes_ids: return set() + seen_ids: set[int] = set() if not database_engine.optimizer.slow_range_in_select: # # SQLite has a superior query optimizer for the distinct query below as it uses @@ -303,12 +307,17 @@ def _select_unused_attributes_ids( # (136723); # ...Using index # - seen_ids = { - state[0] - for state in session.execute( - attributes_ids_exist_in_states_with_fast_in_distinct(attributes_ids) - ).all() - } + for attributes_ids_chunk in chunked_or_all( + attributes_ids, instance.max_bind_vars + ): + seen_ids.update( + state[0] + for state in session.execute( + attributes_ids_exist_in_states_with_fast_in_distinct( + attributes_ids_chunk + ) + ).all() + ) else: # # This branch is for DBMS that cannot optimize the distinct query well and has @@ -334,7 +343,6 @@ def _select_unused_attributes_ids( # We now break the query into groups of 100 and use a lambda_stmt to ensure # that the query is only cached once. 
# - seen_ids = set() groups = [iter(attributes_ids)] * 100 for attr_ids in zip_longest(*groups, fillvalue=None): seen_ids |= { @@ -361,29 +369,33 @@ def _purge_unused_attributes_ids( database_engine = instance.database_engine assert database_engine is not None if unused_attribute_ids_set := _select_unused_attributes_ids( - session, attributes_ids_batch, database_engine + instance, session, attributes_ids_batch, database_engine ): _purge_batch_attributes_ids(instance, session, unused_attribute_ids_set) def _select_unused_event_data_ids( - session: Session, data_ids: set[int], database_engine: DatabaseEngine + instance: Recorder, + session: Session, + data_ids: set[int], + database_engine: DatabaseEngine, ) -> set[int]: """Return a set of event data ids that are not used by any events in the db.""" if not data_ids: return set() + seen_ids: set[int] = set() # See _select_unused_attributes_ids for why this function # branches for non-sqlite databases. if not database_engine.optimizer.slow_range_in_select: - seen_ids = { - state[0] - for state in session.execute( - data_ids_exist_in_events_with_fast_in_distinct(data_ids) - ).all() - } + for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): + seen_ids.update( + state[0] + for state in session.execute( + data_ids_exist_in_events_with_fast_in_distinct(data_ids_chunk) + ).all() + ) else: - seen_ids = set() groups = [iter(data_ids)] * 100 for data_ids_group in zip_longest(*groups, fillvalue=None): seen_ids |= { @@ -404,7 +416,7 @@ def _purge_unused_data_ids( database_engine = instance.database_engine assert database_engine is not None if unused_data_ids_set := _select_unused_event_data_ids( - session, data_ids_batch, database_engine + instance, session, data_ids_batch, database_engine ): _purge_batch_data_ids(instance, session, unused_data_ids_set) @@ -519,7 +531,7 @@ def _purge_batch_attributes_ids( instance: Recorder, session: Session, attributes_ids: set[int] ) -> None: """Delete old attributes ids in 
batches of max_bind_vars.""" - for attributes_ids_chunk in chunked(attributes_ids, instance.max_bind_vars): + for attributes_ids_chunk in chunked_or_all(attributes_ids, instance.max_bind_vars): deleted_rows = session.execute( delete_states_attributes_rows(attributes_ids_chunk) ) @@ -533,7 +545,7 @@ def _purge_batch_data_ids( instance: Recorder, session: Session, data_ids: set[int] ) -> None: """Delete old event data ids in batches of max_bind_vars.""" - for data_ids_chunk in chunked(data_ids, instance.max_bind_vars): + for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): deleted_rows = session.execute(delete_event_data_rows(data_ids_chunk)) _LOGGER.debug("Deleted %s data events", deleted_rows) @@ -694,7 +706,10 @@ def _purge_filtered_states( # we will need to purge them here. _purge_event_ids(session, filtered_event_ids) unused_attribute_ids_set = _select_unused_attributes_ids( - session, {id_ for id_ in attributes_ids if id_ is not None}, database_engine + instance, + session, + {id_ for id_ in attributes_ids if id_ is not None}, + database_engine, ) _purge_batch_attributes_ids(instance, session, unused_attribute_ids_set) return False @@ -741,7 +756,7 @@ def _purge_filtered_events( _purge_state_ids(instance, session, state_ids) _purge_event_ids(session, event_ids_set) if unused_data_ids_set := _select_unused_event_data_ids( - session, set(data_ids), database_engine + instance, session, set(data_ids), database_engine ): _purge_batch_data_ids(instance, session, unused_data_ids_set) return False diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index f94601bb2cb..2d518d8874b 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -1,7 +1,7 @@ """SQLAlchemy util functions.""" from __future__ import annotations -from collections.abc import Callable, Generator, Iterable, Sequence +from collections.abc import Callable, Collection, Generator, Iterable, 
Sequence from contextlib import contextmanager from datetime import date, datetime, timedelta import functools @@ -857,6 +857,20 @@ def chunked(iterable: Iterable, chunked_num: int) -> Iterable[Any]: return iter(partial(take, chunked_num, iter(iterable)), []) +def chunked_or_all(iterable: Collection[Any], chunked_num: int) -> Iterable[Any]: + """Break *collection* into iterables of length *n*. + + Returns the collection if its length is less than *n*. + + Unlike chunked, this function requires a collection so it can + determine the length of the collection and return the collection + if it is less than *n*. + """ + if len(iterable) <= chunked_num: + return (iterable,) + return chunked(iterable, chunked_num) + + def get_index_by_name(session: Session, table_name: str, index_name: str) -> str | None: """Get an index by name.""" connection = session.connection() diff --git a/homeassistant/components/remote/significant_change.py b/homeassistant/components/remote/significant_change.py new file mode 100644 index 00000000000..8e5a3669041 --- /dev/null +++ b/homeassistant/components/remote/significant_change.py @@ -0,0 +1,27 @@ +"""Helper to test significant Remote state changes.""" +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant, callback + +from . 
import ATTR_CURRENT_ACTIVITY + + +@callback +def async_check_significant_change( + hass: HomeAssistant, + old_state: str, + old_attrs: dict, + new_state: str, + new_attrs: dict, + **kwargs: Any, +) -> bool | None: + """Test if state significantly changed.""" + if old_state != new_state: + return True + + if old_attrs.get(ATTR_CURRENT_ACTIVITY) != new_attrs.get(ATTR_CURRENT_ACTIVITY): + return True + + return False diff --git a/homeassistant/components/renault/__init__.py b/homeassistant/components/renault/__init__.py index f69451290bc..6b5679088a0 100644 --- a/homeassistant/components/renault/__init__.py +++ b/homeassistant/components/renault/__init__.py @@ -28,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b hass.data.setdefault(DOMAIN, {}) try: await renault_hub.async_initialise(config_entry) - except aiohttp.ClientResponseError as exc: + except aiohttp.ClientError as exc: raise ConfigEntryNotReady() from exc hass.data[DOMAIN][config_entry.entry_id] = renault_hub diff --git a/homeassistant/components/renault/coordinator.py b/homeassistant/components/renault/coordinator.py index d101b551dfe..f8e6a21823a 100644 --- a/homeassistant/components/renault/coordinator.py +++ b/homeassistant/components/renault/coordinator.py @@ -45,6 +45,7 @@ class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): ) self.access_denied = False self.not_supported = False + self._has_already_worked = False async def _async_update_data(self) -> T: """Fetch the latest data from the source.""" @@ -52,11 +53,16 @@ class RenaultDataUpdateCoordinator(DataUpdateCoordinator[T]): raise NotImplementedError("Update method not implemented") try: async with _PARALLEL_SEMAPHORE: - return await self.update_method() + data = await self.update_method() + self._has_already_worked = True + return data + except AccessDeniedException as err: - # Disable because the account is not allowed to access this Renault endpoint. 
- self.update_interval = None - self.access_denied = True + # This can mean both a temporary error or a permanent error. If it has + # worked before, make it temporary, if not disable the update interval. + if not self._has_already_worked: + self.update_interval = None + self.access_denied = True raise UpdateFailed(f"This endpoint is denied: {err}") from err except NotSupportedException as err: diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 8425f29fbe8..7f8448d277d 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -16,6 +16,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -89,9 +90,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)): await host.renew() - async def async_check_firmware_update() -> str | Literal[ - False - ] | NewSoftwareVersion: + async def async_check_firmware_update() -> ( + str | Literal[False] | NewSoftwareVersion + ): """Check for firmware updates.""" if not host.api.supported(None, "update"): return False @@ -148,6 +149,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b firmware_coordinator=firmware_coordinator, ) + cleanup_disconnected_cams(hass, config_entry.entry_id, host) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) config_entry.async_on_unload( @@ -175,3 +178,51 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> 
hass.data[DOMAIN].pop(config_entry.entry_id) return unload_ok + + +def cleanup_disconnected_cams( + hass: HomeAssistant, config_entry_id: str, host: ReolinkHost +) -> None: + """Clean-up disconnected camera channels.""" + if not host.api.is_nvr: + return + + device_reg = dr.async_get(hass) + devices = dr.async_entries_for_config_entry(device_reg, config_entry_id) + for device in devices: + device_id = [ + dev_id[1].split("_ch") + for dev_id in device.identifiers + if dev_id[0] == DOMAIN + ][0] + + if len(device_id) < 2: + # Do not consider the NVR itself + continue + + ch = int(device_id[1]) + ch_model = host.api.camera_model(ch) + remove = False + if ch not in host.api.channels: + remove = True + _LOGGER.debug( + "Removing Reolink device %s, " + "since no camera is connected to NVR channel %s anymore", + device.name, + ch, + ) + if ch_model not in [device.model, "Unknown"]: + remove = True + _LOGGER.debug( + "Removing Reolink device %s, " + "since the camera model connected to channel %s changed from %s to %s", + device.name, + ch, + device.model, + ch_model, + ) + if not remove: + continue + + # clean device registry and associated entities + device_reg.async_remove_device(device.id) diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index 7f2ff3e0053..226b81b1c74 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -25,25 +25,19 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -@dataclass -class ReolinkBinarySensorEntityDescriptionMixin: - """Mixin values for Reolink binary sensor entities.""" - - value: Callable[[Host, int], bool] - - -@dataclass +@dataclass(kw_only=True) class ReolinkBinarySensorEntityDescription( - BinarySensorEntityDescription, ReolinkBinarySensorEntityDescriptionMixin + BinarySensorEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes binary sensor entities.""" - icon: str = "mdi:motion-sensor" icon_off: str = "mdi:motion-sensor-off" - supported: Callable[[Host, int], bool] = lambda host, ch: True + icon: str = "mdi:motion-sensor" + value: Callable[[Host, int], bool] BINARY_SENSORS = ( @@ -79,7 +73,18 @@ BINARY_SENSORS = ( icon="mdi:dog-side", icon_off="mdi:dog-side-off", value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), - supported=lambda api, ch: api.ai_supported(ch, PET_DETECTION_TYPE), + supported=lambda api, ch: ( + api.ai_supported(ch, PET_DETECTION_TYPE) + and not api.supported(ch, "ai_animal") + ), + ), + ReolinkBinarySensorEntityDescription( + key=PET_DETECTION_TYPE, + translation_key="animal", + icon="mdi:paw", + icon_off="mdi:paw-off", + value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE), + supported=lambda api, ch: api.supported(ch, "ai_animal"), ), ReolinkBinarySensorEntityDescription( key="visitor", @@ -125,8 +130,8 @@ class ReolinkBinarySensorEntity(ReolinkChannelCoordinatorEntity, BinarySensorEnt entity_description: ReolinkBinarySensorEntityDescription, ) -> None: """Initialize Reolink binary sensor.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description + super().__init__(reolink_data, channel) if self._host.api.model in DUAL_LENS_DUAL_MOTION_MODELS: if entity_description.translation_key is not None: @@ -135,10 +140,6 @@ class 
ReolinkBinarySensorEntity(ReolinkChannelCoordinatorEntity, BinarySensorEnt key = entity_description.key self._attr_translation_key = f"{key}_lens_{self._channel}" - self._attr_unique_id = ( - f"{self._host.unique_id}_{self._channel}_{entity_description.key}" - ) - @property def icon(self) -> str | None: """Icon of the sensor.""" diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index f1797527914..88204d9a806 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -6,53 +6,58 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import GuardEnum, Host, PtzEnum +from reolink_aio.exceptions import ReolinkError +import voluptuous as vol from homeassistant.components.button import ( ButtonDeviceClass, ButtonEntity, ButtonEntityDescription, ) +from homeassistant.components.camera import CameraEntityFeature from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from . 
import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkHostCoordinatorEntity +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, +) + +ATTR_SPEED = "speed" +SUPPORT_PTZ_SPEED = CameraEntityFeature.STREAM -@dataclass -class ReolinkButtonEntityDescriptionMixin: - """Mixin values for Reolink button entities for a camera channel.""" - - method: Callable[[Host, int], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkButtonEntityDescription( - ButtonEntityDescription, ReolinkButtonEntityDescriptionMixin + ButtonEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes button entities for a camera channel.""" - supported: Callable[[Host, int], bool] = lambda api, ch: True enabled_default: Callable[[Host, int], bool] | None = None + method: Callable[[Host, int], Any] + ptz_cmd: str | None = None -@dataclass -class ReolinkHostButtonEntityDescriptionMixin: - """Mixin values for Reolink button entities for the host.""" - - method: Callable[[Host], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkHostButtonEntityDescription( - ButtonEntityDescription, ReolinkHostButtonEntityDescriptionMixin + ButtonEntityDescription, + ReolinkHostEntityDescription, ): """A class that describes button entities for the host.""" - supported: Callable[[Host], bool] = lambda api: True + method: Callable[[Host], Any] BUTTON_ENTITIES = ( @@ -61,8 +66,9 @@ BUTTON_ENTITIES = ( translation_key="ptz_stop", icon="mdi:pan", enabled_default=lambda api, ch: api.supported(ch, "pan_tilt"), - supported=lambda api, ch: api.supported(ch, "pan_tilt") - or api.supported(ch, "zoom_basic"), + supported=lambda api, ch: ( + api.supported(ch, "pan_tilt") or api.supported(ch, "zoom_basic") + ), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.stop.value), ), ReolinkButtonEntityDescription( @@ -71,6 +77,7 @@ 
BUTTON_ENTITIES = ( icon="mdi:pan", supported=lambda api, ch: api.supported(ch, "pan"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.left.value), + ptz_cmd=PtzEnum.left.value, ), ReolinkButtonEntityDescription( key="ptz_right", @@ -78,6 +85,7 @@ BUTTON_ENTITIES = ( icon="mdi:pan", supported=lambda api, ch: api.supported(ch, "pan"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.right.value), + ptz_cmd=PtzEnum.right.value, ), ReolinkButtonEntityDescription( key="ptz_up", @@ -85,6 +93,7 @@ BUTTON_ENTITIES = ( icon="mdi:pan", supported=lambda api, ch: api.supported(ch, "tilt"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.up.value), + ptz_cmd=PtzEnum.up.value, ), ReolinkButtonEntityDescription( key="ptz_down", @@ -92,6 +101,7 @@ BUTTON_ENTITIES = ( icon="mdi:pan", supported=lambda api, ch: api.supported(ch, "tilt"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.down.value), + ptz_cmd=PtzEnum.down.value, ), ReolinkButtonEntityDescription( key="ptz_zoom_in", @@ -100,6 +110,7 @@ BUTTON_ENTITIES = ( entity_registry_enabled_default=False, supported=lambda api, ch: api.supported(ch, "zoom_basic"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.zoomin.value), + ptz_cmd=PtzEnum.zoomin.value, ), ReolinkButtonEntityDescription( key="ptz_zoom_out", @@ -108,6 +119,7 @@ BUTTON_ENTITIES = ( entity_registry_enabled_default=False, supported=lambda api, ch: api.supported(ch, "zoom_basic"), method=lambda api, ch: api.set_ptz_command(ch, command=PtzEnum.zoomout.value), + ptz_cmd=PtzEnum.zoomout.value, ), ReolinkButtonEntityDescription( key="ptz_calibrate", @@ -169,6 +181,14 @@ async def async_setup_entry( ) async_add_entities(entities) + platform = async_get_current_platform() + platform.async_register_entity_service( + "ptz_move", + {vol.Required(ATTR_SPEED): cv.positive_int}, + "async_ptz_move", + [SUPPORT_PTZ_SPEED], + ) + class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): 
"""Base button entity class for Reolink IP cameras.""" @@ -182,20 +202,36 @@ class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): entity_description: ReolinkButtonEntityDescription, ) -> None: """Initialize Reolink button entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description + super().__init__(reolink_data, channel) - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) if entity_description.enabled_default is not None: self._attr_entity_registry_enabled_default = ( entity_description.enabled_default(self._host.api, self._channel) ) + if ( + self._host.api.supported(channel, "ptz_speed") + and entity_description.ptz_cmd is not None + ): + self._attr_supported_features = SUPPORT_PTZ_SPEED + async def async_press(self) -> None: """Execute the button action.""" - await self.entity_description.method(self._host.api, self._channel) + try: + await self.entity_description.method(self._host.api, self._channel) + except ReolinkError as err: + raise HomeAssistantError(err) from err + + async def async_ptz_move(self, **kwargs) -> None: + """PTZ move with speed.""" + speed = kwargs[ATTR_SPEED] + try: + await self._host.api.set_ptz_command( + self._channel, command=self.entity_description.ptz_cmd, speed=speed + ) + except ReolinkError as err: + raise HomeAssistantError(err) from err class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): @@ -209,11 +245,12 @@ class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): entity_description: ReolinkHostButtonEntityDescription, ) -> None: """Initialize Reolink button entity.""" - super().__init__(reolink_data) self.entity_description = entity_description - - self._attr_unique_id = f"{self._host.unique_id}_{entity_description.key}" + super().__init__(reolink_data) async def async_press(self) -> None: """Execute the button action.""" - await self.entity_description.method(self._host.api) + try: + 
await self.entity_description.method(self._host.api) + except ReolinkError as err: + raise HomeAssistantError(err) from err diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index b012649ec4c..2ad8105c66c 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -1,22 +1,93 @@ """Component providing support for Reolink IP cameras.""" from __future__ import annotations +from dataclasses import dataclass import logging from reolink_aio.api import DUAL_LENS_MODELS +from reolink_aio.exceptions import ReolinkError -from homeassistant.components.camera import Camera, CameraEntityFeature +from homeassistant.components.camera import ( + Camera, + CameraEntityDescription, + CameraEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription _LOGGER = logging.getLogger(__name__) +@dataclass(kw_only=True) +class ReolinkCameraEntityDescription( + CameraEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes camera entities for a camera channel.""" + + stream: str + + +CAMERA_ENTITIES = ( + ReolinkCameraEntityDescription( + key="sub", + stream="sub", + translation_key="sub", + ), + ReolinkCameraEntityDescription( + key="main", + stream="main", + translation_key="main", + entity_registry_enabled_default=False, + ), + ReolinkCameraEntityDescription( + key="snapshots_sub", + stream="snapshots_sub", + translation_key="snapshots_sub", + entity_registry_enabled_default=False, + ), + ReolinkCameraEntityDescription( + key="snapshots", + stream="snapshots_main", + translation_key="snapshots_main", + entity_registry_enabled_default=False, + ), + ReolinkCameraEntityDescription( + key="ext", + stream="ext", + translation_key="ext", + supported=lambda api, ch: api.protocol in ["rtmp", "flv"], + entity_registry_enabled_default=False, + ), + ReolinkCameraEntityDescription( + key="autotrack_sub", + stream="autotrack_sub", + translation_key="autotrack_sub", + supported=lambda api, ch: api.supported(ch, "autotrack_stream"), + ), + ReolinkCameraEntityDescription( + key="autotrack_snapshots_sub", + stream="autotrack_snapshots_sub", + translation_key="autotrack_snapshots_sub", + supported=lambda api, ch: api.supported(ch, "autotrack_stream"), + entity_registry_enabled_default=False, + ), + ReolinkCameraEntityDescription( + key="autotrack_snapshots_main", + stream="autotrack_snapshots_main", + translation_key="autotrack_snapshots_main", + supported=lambda api, ch: api.supported(ch, "autotrack_stream"), + entity_registry_enabled_default=False, + ), +) + + async def async_setup_entry( hass: HomeAssistant, config_entry: 
ConfigEntry, @@ -24,62 +95,58 @@ async def async_setup_entry( ) -> None: """Set up a Reolink IP Camera.""" reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] - host = reolink_data.host - cameras = [] - for channel in host.api.stream_channels: - streams = ["sub", "main", "snapshots_sub", "snapshots_main"] - if host.api.protocol in ["rtmp", "flv"]: - streams.append("ext") - - if host.api.supported(channel, "autotrack_stream"): - streams.extend( - ["autotrack_sub", "autotrack_snapshots_sub", "autotrack_snapshots_main"] - ) - - for stream in streams: - stream_url = await host.api.get_stream_source(channel, stream) - if stream_url is None and "snapshots" not in stream: + entities: list[ReolinkCamera] = [] + for entity_description in CAMERA_ENTITIES: + for channel in reolink_data.host.api.stream_channels: + if not entity_description.supported(reolink_data.host.api, channel): + continue + stream_url = await reolink_data.host.api.get_stream_source( + channel, entity_description.stream + ) + if stream_url is None and "snapshots" not in entity_description.stream: continue - cameras.append(ReolinkCamera(reolink_data, channel, stream)) - async_add_entities(cameras) + entities.append(ReolinkCamera(reolink_data, channel, entity_description)) + + async_add_entities(entities) class ReolinkCamera(ReolinkChannelCoordinatorEntity, Camera): """An implementation of a Reolink IP camera.""" _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + entity_description: ReolinkCameraEntityDescription def __init__( self, reolink_data: ReolinkData, channel: int, - stream: str, + entity_description: ReolinkCameraEntityDescription, ) -> None: """Initialize Reolink camera stream.""" + self.entity_description = entity_description ReolinkChannelCoordinatorEntity.__init__(self, reolink_data, channel) Camera.__init__(self) - self._stream = stream - - stream_name = self._stream.replace("_", " ") if self._host.api.model in DUAL_LENS_MODELS: - self._attr_name = 
f"{stream_name} lens {self._channel}" - else: - self._attr_name = stream_name - stream_id = self._stream - if stream_id == "snapshots_main": - stream_id = "snapshots" - self._attr_unique_id = f"{self._host.unique_id}_{self._channel}_{stream_id}" - self._attr_entity_registry_enabled_default = stream in ["sub", "autotrack_sub"] + self._attr_translation_key = ( + f"{entity_description.translation_key}_lens_{self._channel}" + ) async def stream_source(self) -> str | None: """Return the source of the stream.""" - return await self._host.api.get_stream_source(self._channel, self._stream) + return await self._host.api.get_stream_source( + self._channel, self.entity_description.stream + ) async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response from the camera.""" - return await self._host.api.get_snapshot(self._channel, self._stream) + try: + return await self._host.api.get_snapshot( + self._channel, self.entity_description.stream + ) + except ReolinkError as err: + raise HomeAssistantError(err) from err diff --git a/homeassistant/components/reolink/diagnostics.py b/homeassistant/components/reolink/diagnostics.py new file mode 100644 index 00000000000..04b476296f8 --- /dev/null +++ b/homeassistant/components/reolink/diagnostics.py @@ -0,0 +1,46 @@ +"""Diagnostics support for Reolink.""" +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from . 
import ReolinkData +from .const import DOMAIN + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] + host = reolink_data.host + api = host.api + + IPC_cam: dict[int, dict[str, Any]] = {} + for ch in api.channels: + IPC_cam[ch] = {} + IPC_cam[ch]["model"] = api.camera_model(ch) + IPC_cam[ch]["firmware version"] = api.camera_sw_version(ch) + + return { + "model": api.model, + "hardware version": api.hardware_version, + "firmware version": api.sw_version, + "HTTPS": api.use_https, + "HTTP(S) port": api.port, + "WiFi connection": api.wifi_connection, + "WiFi signal": api.wifi_signal, + "RTMP enabled": api.rtmp_enabled, + "RTSP enabled": api.rtsp_enabled, + "ONVIF enabled": api.onvif_enabled, + "event connection": host.event_connection, + "stream protocol": api.protocol, + "channels": api.channels, + "stream channels": api.stream_channels, + "IPC cams": IPC_cam, + "capabilities": api.capabilities, + "api versions": api.checked_api_versions, + "abilities": api.abilities, + } diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index e7d62c9705a..584b380f391 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -1,11 +1,14 @@ """Reolink parent entity class.""" from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass from typing import TypeVar -from reolink_aio.api import DUAL_LENS_MODELS +from reolink_aio.api import DUAL_LENS_MODELS, Host from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, @@ -17,8 +20,22 @@ from .const import DOMAIN _T = TypeVar("_T") 
+@dataclass(kw_only=True) +class ReolinkChannelEntityDescription(EntityDescription): + """A class that describes entities for a camera channel.""" + + supported: Callable[[Host, int], bool] = lambda api, ch: True + + +@dataclass(kw_only=True) +class ReolinkHostEntityDescription(EntityDescription): + """A class that describes host entities.""" + + supported: Callable[[Host], bool] = lambda api: True + + class ReolinkBaseCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[_T]]): - """Parent class fo Reolink entities.""" + """Parent class for Reolink entities.""" _attr_has_entity_name = True @@ -42,6 +59,7 @@ class ReolinkBaseCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[_T]]) manufacturer=self._host.api.manufacturer, hw_version=self._host.api.hardware_version, sw_version=self._host.api.sw_version, + serial_number=self._host.api.uid, configuration_url=self._conf_url, ) @@ -58,14 +76,20 @@ class ReolinkHostCoordinatorEntity(ReolinkBaseCoordinatorEntity[None]): basically a NVR with a single channel that has the camera connected to that channel. 
""" + entity_description: ReolinkHostEntityDescription | ReolinkChannelEntityDescription + def __init__(self, reolink_data: ReolinkData) -> None: """Initialize ReolinkHostCoordinatorEntity.""" super().__init__(reolink_data, reolink_data.device_coordinator) + self._attr_unique_id = f"{self._host.unique_id}_{self.entity_description.key}" + class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): """Parent class for Reolink hardware camera entities connected to a channel of the NVR.""" + entity_description: ReolinkChannelEntityDescription + def __init__( self, reolink_data: ReolinkData, @@ -75,6 +99,9 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): super().__init__(reolink_data) self._channel = channel + self._attr_unique_id = ( + f"{self._host.unique_id}_{channel}_{self.entity_description.key}" + ) dev_ch = channel if self._host.api.model in DUAL_LENS_MODELS: @@ -87,5 +114,6 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): name=self._host.api.camera_name(dev_ch), model=self._host.api.camera_model(dev_ch), manufacturer=self._host.api.manufacturer, + sw_version=self._host.api.camera_sw_version(dev_ch), configuration_url=self._conf_url, ) diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 0075bbac4e6..f6eb4cb0e55 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -661,3 +661,12 @@ class ReolinkHost: for channel in channels: async_dispatcher_send(self._hass, f"{self.webhook_id}_{channel}", {}) + + @property + def event_connection(self) -> str: + """Type of connection to receive events.""" + if self._webhook_reachable: + return "ONVIF push" + if self._long_poll_received: + return "ONVIF long polling" + return "Fast polling" diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index 938093df4a3..b2d0402b1b9 100644 --- a/homeassistant/components/reolink/light.py +++ 
b/homeassistant/components/reolink/light.py @@ -6,6 +6,7 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Host +from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -16,30 +17,25 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -@dataclass -class ReolinkLightEntityDescriptionMixin: - """Mixin values for Reolink light entities.""" - - is_on_fn: Callable[[Host, int], bool] - turn_on_off_fn: Callable[[Host, int, bool], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkLightEntityDescription( - LightEntityDescription, ReolinkLightEntityDescriptionMixin + LightEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes light entities.""" - supported_fn: Callable[[Host, int], bool] = lambda api, ch: True get_brightness_fn: Callable[[Host, int], int | None] | None = None + is_on_fn: Callable[[Host, int], bool] set_brightness_fn: Callable[[Host, int, int], Any] | None = None + turn_on_off_fn: Callable[[Host, int, bool], Any] LIGHT_ENTITIES = ( @@ -47,7 +43,7 @@ LIGHT_ENTITIES = ( key="floodlight", translation_key="floodlight", icon="mdi:spotlight-beam", - supported_fn=lambda api, ch: api.supported(ch, "floodLight"), + supported=lambda api, ch: api.supported(ch, "floodLight"), is_on_fn=lambda api, ch: api.whiteled_state(ch), turn_on_off_fn=lambda api, ch, value: api.set_whiteled(ch, state=value), get_brightness_fn=lambda api, ch: 
api.whiteled_brightness(ch), @@ -58,7 +54,7 @@ LIGHT_ENTITIES = ( translation_key="ir_lights", icon="mdi:led-off", entity_category=EntityCategory.CONFIG, - supported_fn=lambda api, ch: api.supported(ch, "ir_lights"), + supported=lambda api, ch: api.supported(ch, "ir_lights"), is_on_fn=lambda api, ch: api.ir_enabled(ch), turn_on_off_fn=lambda api, ch, value: api.set_ir_lights(ch, value), ), @@ -67,7 +63,7 @@ LIGHT_ENTITIES = ( translation_key="status_led", icon="mdi:lightning-bolt-circle", entity_category=EntityCategory.CONFIG, - supported_fn=lambda api, ch: api.supported(ch, "power_led"), + supported=lambda api, ch: api.supported(ch, "power_led"), is_on_fn=lambda api, ch: api.status_led_enabled(ch), turn_on_off_fn=lambda api, ch, value: api.set_status_led(ch, value), ), @@ -86,7 +82,7 @@ async def async_setup_entry( ReolinkLightEntity(reolink_data, channel, entity_description) for entity_description in LIGHT_ENTITIES for channel in reolink_data.host.api.channels - if entity_description.supported_fn(reolink_data.host.api, channel) + if entity_description.supported(reolink_data.host.api, channel) ) @@ -102,12 +98,8 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): entity_description: ReolinkLightEntityDescription, ) -> None: """Initialize Reolink light entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description - - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) + super().__init__(reolink_data, channel) if entity_description.set_brightness_fn is None: self._attr_supported_color_modes = {ColorMode.ONOFF} @@ -137,9 +129,12 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn light off.""" - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, False - ) + try: + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, False + ) + 
except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: @@ -148,11 +143,19 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): brightness := kwargs.get(ATTR_BRIGHTNESS) ) is not None and self.entity_description.set_brightness_fn is not None: brightness_pct = int(brightness / 255.0 * 100) - await self.entity_description.set_brightness_fn( - self._host.api, self._channel, brightness_pct - ) + try: + await self.entity_description.set_brightness_fn( + self._host.api, self._channel, brightness_pct + ) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, True - ) + try: + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, True + ) + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 58785c1d795..5ffbc2fb186 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.7.15"] + "requirements": ["reolink-aio==0.8.1"] } diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py new file mode 100644 index 00000000000..6a350e13836 --- /dev/null +++ b/homeassistant/components/reolink/media_source.py @@ -0,0 +1,330 @@ +"""Expose Reolink IP camera VODs as media sources.""" + +from __future__ import annotations + +import datetime as dt +import logging + +from homeassistant.components.camera import DOMAIN as CAM_DOMAIN, 
DynamicStreamSettings +from homeassistant.components.media_player import MediaClass, MediaType +from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.media_source.models import ( + BrowseMediaSource, + MediaSource, + MediaSourceItem, + PlayMedia, +) +from homeassistant.components.stream import create_stream +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . import ReolinkData +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_get_media_source(hass: HomeAssistant) -> ReolinkVODMediaSource: + """Set up camera media source.""" + return ReolinkVODMediaSource(hass) + + +def res_name(stream: str) -> str: + """Return the user friendly name for a stream.""" + return "High res." if stream == "main" else "Low res." + + +class ReolinkVODMediaSource(MediaSource): + """Provide Reolink camera VODs as media sources.""" + + name: str = "Reolink" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize ReolinkVODMediaSource.""" + super().__init__(DOMAIN) + self.hass = hass + self.data: dict[str, ReolinkData] = hass.data[DOMAIN] + + async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: + """Resolve media to a url.""" + identifier = item.identifier.split("|", 5) + if identifier[0] != "FILE": + raise Unresolvable(f"Unknown media item '{item.identifier}'.") + + _, config_entry_id, channel_str, stream_res, filename = identifier + channel = int(channel_str) + + host = self.data[config_entry_id].host + mime_type, url = await host.api.get_vod_source(channel, filename, stream_res) + if _LOGGER.isEnabledFor(logging.DEBUG): + url_log = f"{url.split('&user=')[0]}&user=xxxxx&password=xxxxx" + _LOGGER.debug( + "Opening VOD stream from %s: %s", host.api.camera_name(channel), url_log + ) + + stream = create_stream(self.hass, url, {}, 
DynamicStreamSettings()) + stream.add_provider("hls", timeout=3600) + stream_url: str = stream.endpoint_url("hls") + stream_url = stream_url.replace("master_", "") + return PlayMedia(stream_url, mime_type) + + async def async_browse_media( + self, + item: MediaSourceItem, + ) -> BrowseMediaSource: + """Return media.""" + if item.identifier is None: + return await self._async_generate_root() + + identifier = item.identifier.split("|", 7) + item_type = identifier[0] + + if item_type == "CAM": + _, config_entry_id, channel_str = identifier + return await self._async_generate_resolution_select( + config_entry_id, int(channel_str) + ) + if item_type == "RES": + _, config_entry_id, channel_str, stream = identifier + return await self._async_generate_camera_days( + config_entry_id, int(channel_str), stream + ) + if item_type == "DAY": + ( + _, + config_entry_id, + channel_str, + stream, + year_str, + month_str, + day_str, + ) = identifier + return await self._async_generate_camera_files( + config_entry_id, + int(channel_str), + stream, + int(year_str), + int(month_str), + int(day_str), + ) + + raise Unresolvable(f"Unknown media item '{item.identifier}' during browsing.") + + async def _async_generate_root(self) -> BrowseMediaSource: + """Return all available reolink cameras as root browsing structure.""" + children: list[BrowseMediaSource] = [] + + entity_reg = er.async_get(self.hass) + device_reg = dr.async_get(self.hass) + for config_entry in self.hass.config_entries.async_entries(DOMAIN): + if config_entry.state != ConfigEntryState.LOADED: + continue + channels: list[str] = [] + host = self.data[config_entry.entry_id].host + entities = er.async_entries_for_config_entry( + entity_reg, config_entry.entry_id + ) + for entity in entities: + if ( + entity.disabled + or entity.device_id is None + or entity.domain != CAM_DOMAIN + ): + continue + + device = device_reg.async_get(entity.device_id) + ch = entity.unique_id.split("_")[1] + if ch in channels or device is None: + 
continue + channels.append(ch) + + if ( + host.api.api_version("recReplay", int(ch)) < 1 + or not host.api.hdd_info + ): + # playback stream not supported by this camera or no storage installed + continue + + device_name = device.name + if device.name_by_user is not None: + device_name = device.name_by_user + + children.append( + BrowseMediaSource( + domain=DOMAIN, + identifier=f"CAM|{config_entry.entry_id}|{ch}", + media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title=device_name, + thumbnail=f"/api/camera_proxy/{entity.entity_id}", + can_play=False, + can_expand=True, + ) + ) + + return BrowseMediaSource( + domain=DOMAIN, + identifier=None, + media_class=MediaClass.APP, + media_content_type="", + title="Reolink", + can_play=False, + can_expand=True, + children=children, + ) + + async def _async_generate_resolution_select( + self, config_entry_id: str, channel: int + ) -> BrowseMediaSource: + """Allow the user to select the high or low playback resolution, (low loads faster).""" + host = self.data[config_entry_id].host + + main_enc = await host.api.get_encoding(channel, "main") + if main_enc == "h265": + _LOGGER.debug( + "Reolink camera %s uses h265 encoding for main stream," + "playback only possible using sub stream", + host.api.camera_name(channel), + ) + return await self._async_generate_camera_days( + config_entry_id, channel, "sub" + ) + + children = [ + BrowseMediaSource( + domain=DOMAIN, + identifier=f"RES|{config_entry_id}|{channel}|sub", + media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title="Low resolution", + can_play=False, + can_expand=True, + ), + BrowseMediaSource( + domain=DOMAIN, + identifier=f"RES|{config_entry_id}|{channel}|main", + media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title="High resolution", + can_play=False, + can_expand=True, + ), + ] + + return BrowseMediaSource( + domain=DOMAIN, + identifier=f"RESs|{config_entry_id}|{channel}", + 
media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title=host.api.camera_name(channel), + can_play=False, + can_expand=True, + children=children, + ) + + async def _async_generate_camera_days( + self, config_entry_id: str, channel: int, stream: str + ) -> BrowseMediaSource: + """Return all days on which recordings are available for a reolink camera.""" + host = self.data[config_entry_id].host + + # We want today of the camera, not necessarily today of the server + now = host.api.time() or await host.api.async_get_time() + start = now - dt.timedelta(days=31) + end = now + + children: list[BrowseMediaSource] = [] + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "Requesting recording days of %s from %s to %s", + host.api.camera_name(channel), + start, + end, + ) + statuses, _ = await host.api.request_vod_files( + channel, start, end, status_only=True, stream=stream + ) + for status in statuses: + for day in status.days: + children.append( + BrowseMediaSource( + domain=DOMAIN, + identifier=f"DAY|{config_entry_id}|{channel}|{stream}|{status.year}|{status.month}|{day}", + media_class=MediaClass.DIRECTORY, + media_content_type=MediaType.PLAYLIST, + title=f"{status.year}/{status.month}/{day}", + can_play=False, + can_expand=True, + ) + ) + + return BrowseMediaSource( + domain=DOMAIN, + identifier=f"DAYS|{config_entry_id}|{channel}|{stream}", + media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title=f"{host.api.camera_name(channel)} {res_name(stream)}", + can_play=False, + can_expand=True, + children=children, + ) + + async def _async_generate_camera_files( + self, + config_entry_id: str, + channel: int, + stream: str, + year: int, + month: int, + day: int, + ) -> BrowseMediaSource: + """Return all recording files on a specific day of a Reolink camera.""" + host = self.data[config_entry_id].host + + start = dt.datetime(year, month, day, hour=0, minute=0, second=0) + end = dt.datetime(year, month, day, hour=23, 
minute=59, second=59) + + children: list[BrowseMediaSource] = [] + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "Requesting VODs of %s on %s/%s/%s", + host.api.camera_name(channel), + year, + month, + day, + ) + _, vod_files = await host.api.request_vod_files( + channel, start, end, stream=stream + ) + for file in vod_files: + file_name = f"{file.start_time.time()} {file.duration}" + if file.triggers != file.triggers.NONE: + file_name += " " + " ".join( + str(trigger.name).title() + for trigger in file.triggers + if trigger != trigger.NONE + ) + + children.append( + BrowseMediaSource( + domain=DOMAIN, + identifier=f"FILE|{config_entry_id}|{channel}|{stream}|{file.file_name}", + media_class=MediaClass.VIDEO, + media_content_type=MediaType.VIDEO, + title=file_name, + can_play=True, + can_expand=False, + ) + ) + + return BrowseMediaSource( + domain=DOMAIN, + identifier=f"FILES|{config_entry_id}|{channel}|{stream}", + media_class=MediaClass.CHANNEL, + media_content_type=MediaType.PLAYLIST, + title=f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}", + can_play=False, + can_expand=True, + children=children, + ) diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 6be0cef1670..6a89eabba2b 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -6,6 +6,7 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Host +from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.number import ( NumberEntity, @@ -15,31 +16,26 @@ from homeassistant.components.number import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -@dataclass -class ReolinkNumberEntityDescriptionMixin: - """Mixin values for Reolink number entities.""" - - value: Callable[[Host, int], float | None] - method: Callable[[Host, int, float], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkNumberEntityDescription( - NumberEntityDescription, ReolinkNumberEntityDescriptionMixin + NumberEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes number entities.""" - mode: NumberMode = NumberMode.AUTO - supported: Callable[[Host, int], bool] = lambda api, ch: True - get_min_value: Callable[[Host, int], float] | None = None get_max_value: Callable[[Host, int], float] | None = None + get_min_value: Callable[[Host, int], float] | None = None + method: Callable[[Host, int, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Host, int], float | None] NUMBER_ENTITIES = ( @@ -170,7 +166,23 @@ NUMBER_ENTITIES = ( native_min_value=0, native_max_value=100, supported=lambda api, ch: ( - api.supported(ch, "ai_sensitivity") and api.ai_supported(ch, "dog_cat") + api.supported(ch, "ai_sensitivity") + and api.ai_supported(ch, "dog_cat") + and not api.supported(ch, "ai_animal") + ), + value=lambda api, ch: api.ai_sensitivity(ch, "dog_cat"), + method=lambda api, ch, value: api.set_ai_sensitivity(ch, int(value), "dog_cat"), + ), + ReolinkNumberEntityDescription( + key="ai_pet_sensititvity", + translation_key="ai_animal_sensititvity", + icon="mdi:paw", + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: ( + api.supported(ch, "ai_sensitivity") and api.supported(ch, "ai_animal") ), value=lambda api, ch: api.ai_sensitivity(ch, "dog_cat"), method=lambda api, ch, value: api.set_ai_sensitivity(ch, 
int(value), "dog_cat"), @@ -234,7 +246,25 @@ NUMBER_ENTITIES = ( native_min_value=0, native_max_value=8, supported=lambda api, ch: ( - api.supported(ch, "ai_delay") and api.ai_supported(ch, "dog_cat") + api.supported(ch, "ai_delay") + and api.ai_supported(ch, "dog_cat") + and not api.supported(ch, "ai_animal") + ), + value=lambda api, ch: api.ai_delay(ch, "dog_cat"), + method=lambda api, ch, value: api.set_ai_delay(ch, int(value), "dog_cat"), + ), + ReolinkNumberEntityDescription( + key="ai_pet_delay", + translation_key="ai_animal_delay", + icon="mdi:paw", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=0, + native_max_value=8, + supported=lambda api, ch: ( + api.supported(ch, "ai_delay") and api.supported(ch, "ai_animal") ), value=lambda api, ch: api.ai_delay(ch, "dog_cat"), method=lambda api, ch, value: api.set_ai_delay(ch, int(value), "dog_cat"), @@ -306,6 +336,19 @@ NUMBER_ENTITIES = ( value=lambda api, ch: api.auto_track_stop_time(ch), method=lambda api, ch, value: api.set_auto_tracking(ch, stop_time=int(value)), ), + ReolinkNumberEntityDescription( + key="day_night_switch_threshold", + translation_key="day_night_switch_threshold", + icon="mdi:theme-light-dark", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "dayNightThreshold"), + value=lambda api, ch: api.daynight_threshold(ch), + method=lambda api, ch, value: api.set_daynight_threshold(ch, int(value)), + ), ) @@ -337,8 +380,8 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): entity_description: ReolinkNumberEntityDescription, ) -> None: """Initialize Reolink number entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description + super().__init__(reolink_data, channel) if 
entity_description.get_min_value is not None: self._attr_native_min_value = entity_description.get_min_value( @@ -349,9 +392,6 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): self._host.api, channel ) self._attr_mode = entity_description.mode - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) @property def native_value(self) -> float | None: @@ -360,5 +400,10 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - await self.entity_description.method(self._host.api, self._channel, value) + try: + await self.entity_description.method(self._host.api, self._channel, value) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index fd42e69268d..3d75b08b5d1 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -13,35 +13,31 @@ from reolink_aio.api import ( StatusLedEnum, TrackMethodEnum, ) +from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription _LOGGER = logging.getLogger(__name__) -@dataclass -class ReolinkSelectEntityDescriptionMixin: - """Mixin values for Reolink select entities.""" - - method: Callable[[Host, int, str], Any] - get_options: list[str] | Callable[[Host, int], list[str]] - - -@dataclass +@dataclass(kw_only=True) class ReolinkSelectEntityDescription( - SelectEntityDescription, ReolinkSelectEntityDescriptionMixin + SelectEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes select entities.""" - supported: Callable[[Host, int], bool] = lambda api, ch: True + get_options: list[str] | Callable[[Host, int], list[str]] + method: Callable[[Host, int, str], Any] value: Callable[[Host, int], str] | None = None @@ -137,14 +133,10 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): entity_description: ReolinkSelectEntityDescription, ) -> None: """Initialize Reolink select entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description + super().__init__(reolink_data, channel) self._log_error = True - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) - if callable(entity_description.get_options): self._attr_options = entity_description.get_options(self._host.api, channel) else: @@ -169,5 +161,10 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change the selected option.""" - await self.entity_description.method(self._host.api, self._channel, option) + try: + await self.entity_description.method(self._host.api, self._channel, option) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err 
self.async_write_ha_state() diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index b9e8ddb8e73..3a5da97dc61 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -21,39 +21,32 @@ from homeassistant.helpers.typing import StateType from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkHostCoordinatorEntity +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, +) -@dataclass -class ReolinkSensorEntityDescriptionMixin: - """Mixin values for Reolink sensor entities for a camera channel.""" +@dataclass(kw_only=True) +class ReolinkSensorEntityDescription( + SensorEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes sensor entities for a camera channel.""" value: Callable[[Host, int], int] -@dataclass -class ReolinkSensorEntityDescription( - SensorEntityDescription, ReolinkSensorEntityDescriptionMixin -): - """A class that describes sensor entities for a camera channel.""" - - supported: Callable[[Host, int], bool] = lambda api, ch: True - - -@dataclass -class ReolinkHostSensorEntityDescriptionMixin: - """Mixin values for Reolink host sensor entities.""" - - value: Callable[[Host], int | None] - - -@dataclass +@dataclass(kw_only=True) class ReolinkHostSensorEntityDescription( - SensorEntityDescription, ReolinkHostSensorEntityDescriptionMixin + SensorEntityDescription, + ReolinkHostEntityDescription, ): """A class that describes host sensor entities.""" - supported: Callable[[Host], bool] = lambda api: True + value: Callable[[Host], int | None] SENSORS = ( @@ -118,12 +111,8 @@ class ReolinkSensorEntity(ReolinkChannelCoordinatorEntity, SensorEntity): entity_description: ReolinkSensorEntityDescription, ) -> None: """Initialize Reolink sensor.""" - super().__init__(reolink_data, 
channel) self.entity_description = entity_description - - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) + super().__init__(reolink_data, channel) @property def native_value(self) -> StateType | date | datetime | Decimal: @@ -142,10 +131,8 @@ class ReolinkHostSensorEntity(ReolinkHostCoordinatorEntity, SensorEntity): entity_description: ReolinkHostSensorEntityDescription, ) -> None: """Initialize Reolink host sensor.""" - super().__init__(reolink_data) self.entity_description = entity_description - - self._attr_unique_id = f"{self._host.unique_id}_{entity_description.key}" + super().__init__(reolink_data) @property def native_value(self) -> StateType | date | datetime | Decimal: diff --git a/homeassistant/components/reolink/services.yaml b/homeassistant/components/reolink/services.yaml new file mode 100644 index 00000000000..42b9af34eb0 --- /dev/null +++ b/homeassistant/components/reolink/services.yaml @@ -0,0 +1,18 @@ +# Describes the format for available reolink services + +ptz_move: + target: + entity: + integration: reolink + domain: button + supported_features: + - camera.CameraEntityFeature.STREAM + fields: + speed: + required: true + default: 10 + selector: + number: + min: 1 + max: 64 + step: 1 diff --git a/homeassistant/components/reolink/siren.py b/homeassistant/components/reolink/siren.py index c91f633ecab..ec709f6ae3d 100644 --- a/homeassistant/components/reolink/siren.py +++ b/homeassistant/components/reolink/siren.py @@ -1,11 +1,10 @@ """Component providing support for Reolink siren entities.""" from __future__ import annotations -from collections.abc import Callable from dataclasses import dataclass from typing import Any -from reolink_aio.api import Host +from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.siren import ( ATTR_DURATION, @@ -16,19 +15,20 @@ from homeassistant.components.siren import ( ) from homeassistant.config_entries import ConfigEntry from 
homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity +from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription @dataclass -class ReolinkSirenEntityDescription(SirenEntityDescription): +class ReolinkSirenEntityDescription( + SirenEntityDescription, ReolinkChannelEntityDescription +): """A class that describes siren entities.""" - supported: Callable[[Host, int], bool] = lambda api, ch: True - SIREN_ENTITIES = ( ReolinkSirenEntityDescription( @@ -74,20 +74,29 @@ class ReolinkSirenEntity(ReolinkChannelCoordinatorEntity, SirenEntity): entity_description: ReolinkSirenEntityDescription, ) -> None: """Initialize Reolink siren entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description - - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) + super().__init__(reolink_data, channel) async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the siren.""" if (volume := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: - await self._host.api.set_volume(self._channel, int(volume * 100)) + try: + await self._host.api.set_volume(self._channel, int(volume * 100)) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err duration = kwargs.get(ATTR_DURATION) - await self._host.api.set_siren(self._channel, True, duration) + try: + await self._host.api.set_siren(self._channel, True, duration) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the siren.""" - await 
self._host.api.set_siren(self._channel, False, None) + try: + await self._host.api.set_siren(self._channel, False, None) + except ReolinkError as err: + raise HomeAssistantError(err) from err diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 0a496d62522..5b26d70b657 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -61,6 +61,18 @@ "description": "\"{name}\" with model \"{model}\" and hardware version \"{hw_version}\" is running a old firmware version \"{current_firmware}\", while at least firmware version \"{required_firmware}\" is required for proper operation of the Reolink integration. The latest firmware can be downloaded from the [Reolink download center]({download_link})." } }, + "services": { + "ptz_move": { + "name": "PTZ move", + "description": "Move the camera with a specific speed.", + "fields": { + "speed": { + "name": "Speed", + "description": "PTZ move speed." 
+ } + } + } + }, "entity": { "binary_sensor": { "face": { @@ -75,6 +87,9 @@ "pet": { "name": "Pet" }, + "animal": { + "name": "Animal" + }, "visitor": { "name": "Visitor" }, @@ -93,6 +108,9 @@ "pet_lens_0": { "name": "Pet lens 0" }, + "animal_lens_0": { + "name": "Animal lens 0" + }, "visitor_lens_0": { "name": "Visitor lens 0" }, @@ -111,6 +129,9 @@ "pet_lens_1": { "name": "Pet lens 1" }, + "animal_lens_1": { + "name": "Animal lens 1" + }, "visitor_lens_1": { "name": "Visitor lens 1" } @@ -147,6 +168,62 @@ "name": "Guard set current position" } }, + "camera": { + "sub": { + "name": "Fluent" + }, + "main": { + "name": "Clear" + }, + "snapshots_sub": { + "name": "Snapshots fluent" + }, + "snapshots_main": { + "name": "Snapshots clear" + }, + "ext": { + "name": "Balanced" + }, + "sub_lens_0": { + "name": "Fluent lens 0" + }, + "main_lens_0": { + "name": "Clear lens 0" + }, + "snapshots_sub_lens_0": { + "name": "Snapshots fluent lens 0" + }, + "snapshots_main_lens_0": { + "name": "Snapshots clear lens 0" + }, + "ext_lens_0": { + "name": "Balanced lens 0" + }, + "sub_lens_1": { + "name": "Fluent lens 1" + }, + "main_lens_1": { + "name": "Clear lens 1" + }, + "snapshots_sub_lens_1": { + "name": "Snapshots fluent lens 1" + }, + "snapshots_main_lens_1": { + "name": "Snapshots clear lens 1" + }, + "ext_lens_1": { + "name": "Balanced lens 1" + }, + "autotrack_sub": { + "name": "Autotrack fluent" + }, + "autotrack_snapshots_sub": { + "name": "Autotrack snapshots fluent" + }, + "autotrack_snapshots_main": { + "name": "Autotrack snapshots clear" + } + }, "light": { "floodlight": { "name": "Floodlight" @@ -189,6 +266,9 @@ "ai_pet_sensititvity": { "name": "AI pet sensitivity" }, + "ai_animal_sensititvity": { + "name": "AI animal sensitivity" + }, "ai_face_delay": { "name": "AI face delay" }, @@ -201,6 +281,9 @@ "ai_pet_delay": { "name": "AI pet delay" }, + "ai_animal_delay": { + "name": "AI animal delay" + }, "auto_quick_reply_time": { "name": "Auto quick reply time" }, @@ 
-215,6 +298,9 @@ }, "auto_track_stop_time": { "name": "Auto track stop time" + }, + "day_night_switch_threshold": { + "name": "Day night switch threshold" } }, "select": { @@ -234,7 +320,7 @@ "state": { "auto": "Auto", "color": "Color", - "blackwhite": "Black&White" + "blackwhite": "Black & white" } }, "ptz_preset": { diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index f07db00e720..fbb8922188d 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -6,50 +6,45 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Host +from reolink_aio.exceptions import ReolinkError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkHostCoordinatorEntity +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, +) -@dataclass -class ReolinkSwitchEntityDescriptionMixin: - """Mixin values for Reolink switch entities.""" - - value: Callable[[Host, int], bool] - method: Callable[[Host, int, bool], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkSwitchEntityDescription( - SwitchEntityDescription, ReolinkSwitchEntityDescriptionMixin + SwitchEntityDescription, + ReolinkChannelEntityDescription, ): """A class that describes switch entities.""" - supported: Callable[[Host, int], bool] = lambda api, ch: True + method: Callable[[Host, int, bool], Any] + value: Callable[[Host, int], bool] -@dataclass -class ReolinkNVRSwitchEntityDescriptionMixin: - """Mixin values for Reolink NVR switch entities.""" - - value: Callable[[Host], bool] - method: Callable[[Host, bool], Any] - - -@dataclass +@dataclass(kw_only=True) class ReolinkNVRSwitchEntityDescription( - SwitchEntityDescription, ReolinkNVRSwitchEntityDescriptionMixin + SwitchEntityDescription, + ReolinkHostEntityDescription, ): """A class that describes NVR switch entities.""" - supported: Callable[[Host], bool] = lambda api: True + method: Callable[[Host, bool], Any] + value: Callable[[Host], bool] SWITCH_ENTITIES = ( @@ -249,12 +244,8 @@ class ReolinkSwitchEntity(ReolinkChannelCoordinatorEntity, SwitchEntity): entity_description: ReolinkSwitchEntityDescription, ) -> None: """Initialize Reolink switch entity.""" - super().__init__(reolink_data, channel) self.entity_description = entity_description - - self._attr_unique_id = ( - f"{self._host.unique_id}_{channel}_{entity_description.key}" - ) + super().__init__(reolink_data, channel) @property def is_on(self) -> bool: @@ -263,12 +254,18 @@ class 
ReolinkSwitchEntity(ReolinkChannelCoordinatorEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - await self.entity_description.method(self._host.api, self._channel, True) + try: + await self.entity_description.method(self._host.api, self._channel, True) + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self.entity_description.method(self._host.api, self._channel, False) + try: + await self.entity_description.method(self._host.api, self._channel, False) + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() @@ -283,8 +280,8 @@ class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity): entity_description: ReolinkNVRSwitchEntityDescription, ) -> None: """Initialize Reolink switch entity.""" - super().__init__(reolink_data) self.entity_description = entity_description + super().__init__(reolink_data) self._attr_unique_id = f"{self._host.unique_id}_{entity_description.key}" @@ -295,10 +292,16 @@ class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - await self.entity_description.method(self._host.api, True) + try: + await self.entity_description.method(self._host.api, True) + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self.entity_description.method(self._host.api, False) + try: + await self.entity_description.method(self._host.api, False) + except ReolinkError as err: + raise HomeAssistantError(err) from err self.async_write_ha_state() diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 1c10671550d..ffd429e92ad 100644 --- 
a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -1,6 +1,7 @@ """Update entities for Reolink devices.""" from __future__ import annotations +from datetime import datetime import logging from typing import Any, Literal @@ -13,9 +14,10 @@ from homeassistant.components.update import ( UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later from . import ReolinkData from .const import DOMAIN @@ -23,6 +25,8 @@ from .entity import ReolinkBaseCoordinatorEntity LOGGER = logging.getLogger(__name__) +POLL_AFTER_INSTALL = 120 + async def async_setup_entry( hass: HomeAssistant, @@ -51,6 +55,7 @@ class ReolinkUpdateEntity( super().__init__(reolink_data, reolink_data.firmware_coordinator) self._attr_unique_id = f"{self._host.unique_id}" + self._cancel_update: CALLBACK_TYPE | None = None @property def installed_version(self) -> str | None: @@ -98,3 +103,18 @@ class ReolinkUpdateEntity( raise HomeAssistantError( f"Error trying to update Reolink firmware: {err}" ) from err + finally: + self.async_write_ha_state() + self._cancel_update = async_call_later( + self.hass, POLL_AFTER_INSTALL, self._async_update_future + ) + + async def _async_update_future(self, now: datetime | None = None) -> None: + """Request update.""" + await self.async_update() + + async def async_will_remove_from_hass(self) -> None: + """Entity removed.""" + await super().async_will_remove_from_hass() + if self._cancel_update is not None: + self._cancel_update() diff --git a/homeassistant/components/repetier/manifest.json b/homeassistant/components/repetier/manifest.json index 5ad3db89ba0..dfddb298284 100644 --- a/homeassistant/components/repetier/manifest.json 
+++ b/homeassistant/components/repetier/manifest.json @@ -1,7 +1,7 @@ { "domain": "repetier", "name": "Repetier-Server", - "codeowners": ["@MTrab", "@ShadowBr0ther"], + "codeowners": ["@ShadowBr0ther"], "documentation": "https://www.home-assistant.io/integrations/repetier", "iot_class": "local_polling", "loggers": ["pyrepetierng"], diff --git a/homeassistant/components/rfxtrx/config_flow.py b/homeassistant/components/rfxtrx/config_flow.py index 179dd04cfaa..54a60d34229 100644 --- a/homeassistant/components/rfxtrx/config_flow.py +++ b/homeassistant/components/rfxtrx/config_flow.py @@ -566,10 +566,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports) list_of_ports = {} for port in ports: - list_of_ports[ - port.device - ] = f"{port}, s/n: {port.serial_number or 'n/a'}" + ( - f" - {port.manufacturer}" if port.manufacturer else "" + list_of_ports[port.device] = ( + f"{port}, s/n: {port.serial_number or 'n/a'}" + + (f" - {port.manufacturer}" if port.manufacturer else "") ) list_of_ports[CONF_MANUAL_PATH] = CONF_MANUAL_PATH diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 56aad1a845b..157a62df05b 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -8,36 +8,33 @@ from functools import partial import logging from typing import Any -from oauthlib.oauth2 import AccessDeniedError -import requests -from ring_doorbell import Auth, Ring +import ring_doorbell from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform, __version__ +from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__ from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.event import 
async_track_time_interval from homeassistant.util.async_ import run_callback_threadsafe +from .const import ( + DEVICES_SCAN_INTERVAL, + DOMAIN, + HEALTH_SCAN_INTERVAL, + HISTORY_SCAN_INTERVAL, + NOTIFICATIONS_SCAN_INTERVAL, + PLATFORMS, + RING_API, + RING_DEVICES, + RING_DEVICES_COORDINATOR, + RING_HEALTH_COORDINATOR, + RING_HISTORY_COORDINATOR, + RING_NOTIFICATIONS_COORDINATOR, +) + _LOGGER = logging.getLogger(__name__) -ATTRIBUTION = "Data provided by Ring.com" - -NOTIFICATION_ID = "ring_notification" -NOTIFICATION_TITLE = "Ring Setup" - -DOMAIN = "ring" -DEFAULT_ENTITY_NAMESPACE = "ring" - -PLATFORMS = [ - Platform.BINARY_SENSOR, - Platform.LIGHT, - Platform.SENSOR, - Platform.SWITCH, - Platform.CAMERA, - Platform.SIREN, -] - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" @@ -49,48 +46,50 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: partial( hass.config_entries.async_update_entry, entry, - data={**entry.data, "token": token}, + data={**entry.data, CONF_TOKEN: token}, ), ).result() - auth = Auth(f"HomeAssistant/{__version__}", entry.data["token"], token_updater) - ring = Ring(auth) + auth = ring_doorbell.Auth( + f"{APPLICATION_NAME}/{__version__}", entry.data[CONF_TOKEN], token_updater + ) + ring = ring_doorbell.Ring(auth) try: await hass.async_add_executor_job(ring.update_data) - except AccessDeniedError: - _LOGGER.error("Access token is no longer valid. 
Please set up Ring again") - return False + except ring_doorbell.AuthenticationError as err: + _LOGGER.warning("Ring access token is no longer valid, need to re-authenticate") + raise ConfigEntryAuthFailed(err) from err hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { - "api": ring, - "devices": ring.devices(), - "device_data": GlobalDataUpdater( - hass, "device", entry.entry_id, ring, "update_devices", timedelta(minutes=1) + RING_API: ring, + RING_DEVICES: ring.devices(), + RING_DEVICES_COORDINATOR: GlobalDataUpdater( + hass, "device", entry, ring, "update_devices", DEVICES_SCAN_INTERVAL ), - "dings_data": GlobalDataUpdater( + RING_NOTIFICATIONS_COORDINATOR: GlobalDataUpdater( hass, "active dings", - entry.entry_id, + entry, ring, "update_dings", - timedelta(seconds=5), + NOTIFICATIONS_SCAN_INTERVAL, ), - "history_data": DeviceDataUpdater( + RING_HISTORY_COORDINATOR: DeviceDataUpdater( hass, "history", - entry.entry_id, + entry, ring, lambda device: device.history(limit=10), - timedelta(minutes=1), + HISTORY_SCAN_INTERVAL, ), - "health_data": DeviceDataUpdater( + RING_HEALTH_COORDINATOR: DeviceDataUpdater( hass, "health", - entry.entry_id, + entry, ring, lambda device: device.update_health_data(), - timedelta(minutes=1), + HEALTH_SCAN_INTERVAL, ), } @@ -143,15 +142,15 @@ class GlobalDataUpdater: self, hass: HomeAssistant, data_type: str, - config_entry_id: str, - ring: Ring, + config_entry: ConfigEntry, + ring: ring_doorbell.Ring, update_method: str, update_interval: timedelta, ) -> None: """Initialize global data updater.""" self.hass = hass self.data_type = data_type - self.config_entry_id = config_entry_id + self.config_entry = config_entry self.ring = ring self.update_method = update_method self.update_interval = update_interval @@ -187,17 +186,19 @@ class GlobalDataUpdater: await self.hass.async_add_executor_job( getattr(self.ring, self.update_method) ) - except AccessDeniedError: - _LOGGER.error("Ring access token is no longer valid. 
Set up Ring again") - await self.hass.config_entries.async_unload(self.config_entry_id) + except ring_doorbell.AuthenticationError: + _LOGGER.warning( + "Ring access token is no longer valid, need to re-authenticate" + ) + self.config_entry.async_start_reauth(self.hass) return - except requests.Timeout: + except ring_doorbell.RingTimeout: _LOGGER.warning( "Time out fetching Ring %s data", self.data_type, ) return - except requests.RequestException as err: + except ring_doorbell.RingError as err: _LOGGER.warning( "Error fetching Ring %s data: %s", self.data_type, @@ -216,15 +217,15 @@ class DeviceDataUpdater: self, hass: HomeAssistant, data_type: str, - config_entry_id: str, - ring: Ring, - update_method: Callable[[Ring], Any], + config_entry: ConfigEntry, + ring: ring_doorbell.Ring, + update_method: Callable[[ring_doorbell.Ring], Any], update_interval: timedelta, ) -> None: """Initialize device data updater.""" self.data_type = data_type self.hass = hass - self.config_entry_id = config_entry_id + self.config_entry = config_entry self.ring = ring self.update_method = update_method self.update_interval = update_interval @@ -276,20 +277,22 @@ class DeviceDataUpdater: for device_id, info in self.devices.items(): try: data = info["data"] = self.update_method(info["device"]) - except AccessDeniedError: - _LOGGER.error("Ring access token is no longer valid. 
Set up Ring again") - self.hass.add_job( - self.hass.config_entries.async_unload(self.config_entry_id) + except ring_doorbell.AuthenticationError: + _LOGGER.warning( + "Ring access token is no longer valid, need to re-authenticate" + ) + self.hass.loop.call_soon_threadsafe( + self.config_entry.async_start_reauth, self.hass ) return - except requests.Timeout: + except ring_doorbell.RingTimeout: _LOGGER.warning( "Time out fetching Ring %s data for device %s", self.data_type, device_id, ) continue - except requests.RequestException as err: + except ring_doorbell.RingError as err: _LOGGER.warning( "Error fetching Ring %s data for device %s: %s", self.data_type, diff --git a/homeassistant/components/ring/binary_sensor.py b/homeassistant/components/ring/binary_sensor.py index ab7207f0ac4..05d26812f54 100644 --- a/homeassistant/components/ring/binary_sensor.py +++ b/homeassistant/components/ring/binary_sensor.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN +from .const import DOMAIN, RING_API, RING_DEVICES, RING_NOTIFICATIONS_COORDINATOR from .entity import RingEntityMixin @@ -53,8 +53,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Ring binary sensors from a config entry.""" - ring = hass.data[DOMAIN][config_entry.entry_id]["api"] - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + ring = hass.data[DOMAIN][config_entry.entry_id][RING_API] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] entities = [ RingBinarySensor(config_entry.entry_id, ring, device, description) @@ -90,13 +90,15 @@ class RingBinarySensor(RingEntityMixin, BinarySensorEntity): async def async_added_to_hass(self) -> None: """Register callbacks.""" await super().async_added_to_hass() - self.ring_objects["dings_data"].async_add_listener(self._dings_update_callback) + self.ring_objects[RING_NOTIFICATIONS_COORDINATOR].async_add_listener( + self._dings_update_callback + ) self._dings_update_callback() async def async_will_remove_from_hass(self) -> None: """Disconnect callbacks.""" await super().async_will_remove_from_hass() - self.ring_objects["dings_data"].async_remove_listener( + self.ring_objects[RING_NOTIFICATIONS_COORDINATOR].async_remove_listener( self._dings_update_callback ) diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index 7f897d17203..196d34600d1 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -16,7 +16,7 @@ from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . 
import DOMAIN +from .const import DOMAIN, RING_DEVICES, RING_HISTORY_COORDINATOR from .entity import RingEntityMixin FORCE_REFRESH_INTERVAL = timedelta(minutes=3) @@ -30,7 +30,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Ring Door Bell and StickUp Camera.""" - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) cams = [] @@ -66,7 +66,7 @@ class RingCam(RingEntityMixin, Camera): """Register callbacks.""" await super().async_added_to_hass() - await self.ring_objects["history_data"].async_track_device( + await self.ring_objects[RING_HISTORY_COORDINATOR].async_track_device( self._device, self._history_update_callback ) @@ -74,7 +74,7 @@ class RingCam(RingEntityMixin, Camera): """Disconnect callbacks.""" await super().async_will_remove_from_hass() - self.ring_objects["history_data"].async_untrack_device( + self.ring_objects[RING_HISTORY_COORDINATOR].async_untrack_device( self._device, self._history_update_callback ) diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index 9425b2f98a4..5c735a3ee8c 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -1,34 +1,47 @@ """Config flow for Ring integration.""" +from collections.abc import Mapping import logging from typing import Any -from oauthlib.oauth2 import AccessDeniedError, MissingTokenError -from ring_doorbell import Auth +import ring_doorbell import voluptuous as vol from homeassistant import config_entries, core, exceptions -from homeassistant.const import __version__ as ha_version +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + APPLICATION_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, + __version__ as ha_version, +) +from homeassistant.data_entry_flow import FlowResult -from . 
import DOMAIN +from .const import CONF_2FA, DOMAIN _LOGGER = logging.getLogger(__name__) +STEP_USER_DATA_SCHEMA = vol.Schema( + {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} +) +STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) + async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect.""" - auth = Auth(f"HomeAssistant/{ha_version}") + auth = ring_doorbell.Auth(f"{APPLICATION_NAME}/{ha_version}") try: token = await hass.async_add_executor_job( auth.fetch_token, - data["username"], - data["password"], - data.get("2fa"), + data[CONF_USERNAME], + data[CONF_PASSWORD], + data.get(CONF_2FA), ) - except MissingTokenError as err: + except ring_doorbell.Requires2FAError as err: raise Require2FA from err - except AccessDeniedError as err: + except ring_doorbell.AuthenticationError as err: raise InvalidAuth from err return token @@ -40,6 +53,7 @@ class RingConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 user_pass: dict[str, Any] = {} + reauth_entry: ConfigEntry | None = None async def async_step_user(self, user_input=None): """Handle the initial step.""" @@ -47,39 +61,85 @@ class RingConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): if user_input is not None: try: token = await validate_input(self.hass, user_input) - await self.async_set_unique_id(user_input["username"]) - - return self.async_create_entry( - title=user_input["username"], - data={"username": user_input["username"], "token": token}, - ) except Require2FA: self.user_pass = user_input return await self.async_step_2fa() - except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" + else: + await self.async_set_unique_id(user_input[CONF_USERNAME]) + return self.async_create_entry( + title=user_input[CONF_USERNAME], + data={CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token}, + ) 
return self.async_show_form( - step_id="user", - data_schema=vol.Schema( - {vol.Required("username"): str, vol.Required("password"): str} - ), - errors=errors, + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) async def async_step_2fa(self, user_input=None): """Handle 2fa step.""" if user_input: + if self.reauth_entry: + return await self.async_step_reauth_confirm( + {**self.user_pass, **user_input} + ) + return await self.async_step_user({**self.user_pass, **user_input}) return self.async_show_form( step_id="2fa", - data_schema=vol.Schema({vol.Required("2fa"): str}), + data_schema=vol.Schema({vol.Required(CONF_2FA): str}), + ) + + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Handle reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Dialog that informs the user that reauth is required.""" + errors = {} + assert self.reauth_entry is not None + + if user_input: + user_input[CONF_USERNAME] = self.reauth_entry.data[CONF_USERNAME] + try: + token = await validate_input(self.hass, user_input) + except Require2FA: + self.user_pass = user_input + return await self.async_step_2fa() + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + data = { + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_TOKEN: token, + } + self.hass.config_entries.async_update_entry( + self.reauth_entry, data=data + ) + await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_REAUTH_DATA_SCHEMA, + errors=errors, + 
description_placeholders={ + CONF_USERNAME: self.reauth_entry.data[CONF_USERNAME] + }, ) diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py new file mode 100644 index 00000000000..10d517ab4a3 --- /dev/null +++ b/homeassistant/components/ring/const.py @@ -0,0 +1,39 @@ +"""The Ring constants.""" +from __future__ import annotations + +from datetime import timedelta + +from homeassistant.const import Platform + +ATTRIBUTION = "Data provided by Ring.com" + +NOTIFICATION_ID = "ring_notification" +NOTIFICATION_TITLE = "Ring Setup" + +DOMAIN = "ring" +DEFAULT_ENTITY_NAMESPACE = "ring" + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.LIGHT, + Platform.SENSOR, + Platform.SWITCH, + Platform.CAMERA, + Platform.SIREN, +] + + +DEVICES_SCAN_INTERVAL = timedelta(minutes=1) +NOTIFICATIONS_SCAN_INTERVAL = timedelta(seconds=5) +HISTORY_SCAN_INTERVAL = timedelta(minutes=1) +HEALTH_SCAN_INTERVAL = timedelta(minutes=1) + +RING_API = "api" +RING_DEVICES = "devices" + +RING_DEVICES_COORDINATOR = "device_data" +RING_NOTIFICATIONS_COORDINATOR = "dings_data" +RING_HISTORY_COORDINATOR = "history_data" +RING_HEALTH_COORDINATOR = "health_data" + +CONF_2FA = "2fa" diff --git a/homeassistant/components/ring/diagnostics.py b/homeassistant/components/ring/diagnostics.py new file mode 100644 index 00000000000..105800f8d13 --- /dev/null +++ b/homeassistant/components/ring/diagnostics.py @@ -0,0 +1,43 @@ +"""Diagnostics support for Ring.""" +from __future__ import annotations + +from typing import Any + +import ring_doorbell + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + +TO_REDACT = { + "id", + "device_id", + "description", + "first_name", + "last_name", + "email", + "location_id", + "ring_net_id", + "wifi_name", + "latitude", + "longitude", + "address", + "ring_id", +} + + +async def 
async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + ring: ring_doorbell.Ring = hass.data[DOMAIN][entry.entry_id]["api"] + devices_raw = [] + for device_type in ring.devices_data: + for device_id in ring.devices_data[device_type]: + devices_raw.append(ring.devices_data[device_type][device_id]) + return async_redact_data( + {"device_data": devices_raw}, + TO_REDACT, + ) diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py index 7160d2ef725..4896ea2db8b 100644 --- a/homeassistant/components/ring/entity.py +++ b/homeassistant/components/ring/entity.py @@ -3,7 +3,7 @@ from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity -from . import ATTRIBUTION, DOMAIN +from .const import ATTRIBUTION, DOMAIN, RING_DEVICES_COORDINATOR class RingEntityMixin(Entity): @@ -28,11 +28,15 @@ class RingEntityMixin(Entity): async def async_added_to_hass(self) -> None: """Register callbacks.""" - self.ring_objects["device_data"].async_add_listener(self._update_callback) + self.ring_objects[RING_DEVICES_COORDINATOR].async_add_listener( + self._update_callback + ) async def async_will_remove_from_hass(self) -> None: """Disconnect callbacks.""" - self.ring_objects["device_data"].async_remove_listener(self._update_callback) + self.ring_objects[RING_DEVICES_COORDINATOR].async_remove_listener( + self._update_callback + ) @callback def _update_callback(self) -> None: diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index 93640e2764e..7830b2547a5 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . 
import DOMAIN +from .const import DOMAIN, RING_DEVICES from .entity import RingEntityMixin _LOGGER = logging.getLogger(__name__) @@ -34,7 +34,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Create the lights for the Ring devices.""" - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] lights = [] diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 9cea738eb3a..36514fc8f35 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/ring", "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], - "requirements": ["ring-doorbell==0.7.3"] + "requirements": ["ring-doorbell[listen]==0.8.3"] } diff --git a/homeassistant/components/ring/sensor.py b/homeassistant/components/ring/sensor.py index af23af07eba..465f6196689 100644 --- a/homeassistant/components/ring/sensor.py +++ b/homeassistant/components/ring/sensor.py @@ -14,7 +14,12 @@ from homeassistant.const import PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import DOMAIN +from .const import ( + DOMAIN, + RING_DEVICES, + RING_HEALTH_COORDINATOR, + RING_HISTORY_COORDINATOR, +) from .entity import RingEntityMixin @@ -24,7 +29,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up a sensor for a Ring device.""" - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] entities = [ description.cls(config_entry.entry_id, device, description) @@ -75,7 +80,7 @@ class HealthDataRingSensor(RingSensor): """Register callbacks.""" await super().async_added_to_hass() - await self.ring_objects["health_data"].async_track_device( + await self.ring_objects[RING_HEALTH_COORDINATOR].async_track_device( self._device, self._health_update_callback ) @@ -83,7 +88,7 @@ class HealthDataRingSensor(RingSensor): """Disconnect callbacks.""" await super().async_will_remove_from_hass() - self.ring_objects["health_data"].async_untrack_device( + self.ring_objects[RING_HEALTH_COORDINATOR].async_untrack_device( self._device, self._health_update_callback ) @@ -112,7 +117,7 @@ class HistoryRingSensor(RingSensor): """Register callbacks.""" await super().async_added_to_hass() - await self.ring_objects["history_data"].async_track_device( + await self.ring_objects[RING_HISTORY_COORDINATOR].async_track_device( self._device, self._history_update_callback ) @@ -120,7 +125,7 @@ class HistoryRingSensor(RingSensor): """Disconnect callbacks.""" await super().async_will_remove_from_hass() - self.ring_objects["history_data"].async_untrack_device( + self.ring_objects[RING_HISTORY_COORDINATOR].async_untrack_device( self._device, self._history_update_callback ) diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index 7f1b147471d..7daf7bd69ca 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry from 
homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN +from .const import DOMAIN, RING_DEVICES from .entity import RingEntityMixin _LOGGER = logging.getLogger(__name__) @@ -21,7 +21,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Create the sirens for the Ring devices.""" - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] sirens = [] for device in devices["chimes"]: diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index b300e335b19..688e3141beb 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -13,6 +13,13 @@ "data": { "2fa": "Two-factor code" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Ring integration needs to re-authenticate your account {username}", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { @@ -20,7 +27,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 7069acd5f0f..074dfee9bd6 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . 
import DOMAIN +from .const import DOMAIN, RING_DEVICES from .entity import RingEntityMixin _LOGGER = logging.getLogger(__name__) @@ -33,7 +33,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Create the switches for the Ring devices.""" - devices = hass.data[DOMAIN][config_entry.entry_id]["devices"] + devices = hass.data[DOMAIN][config_entry.entry_id][RING_DEVICES] switches = [] for device in devices["stickup_cams"]: diff --git a/homeassistant/components/rituals_perfume_genie/binary_sensor.py b/homeassistant/components/rituals_perfume_genie/binary_sensor.py index 73499fb5ccc..ab13898394c 100644 --- a/homeassistant/components/rituals_perfume_genie/binary_sensor.py +++ b/homeassistant/components/rituals_perfume_genie/binary_sensor.py @@ -21,21 +21,14 @@ from .coordinator import RitualsDataUpdateCoordinator from .entity import DiffuserEntity -@dataclass -class RitualsentityDescriptionMixin: - """Mixin values for Rituals entities.""" +@dataclass(kw_only=True) +class RitualsBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Rituals binary sensor entities.""" is_on_fn: Callable[[Diffuser], bool] has_fn: Callable[[Diffuser], bool] -@dataclass -class RitualsBinarySensorEntityDescription( - BinarySensorEntityDescription, RitualsentityDescriptionMixin -): - """Class describing Rituals binary sensor entities.""" - - ENTITY_DESCRIPTIONS = ( RitualsBinarySensorEntityDescription( key="charging", diff --git a/homeassistant/components/rituals_perfume_genie/number.py b/homeassistant/components/rituals_perfume_genie/number.py index 3e6af33315f..35b5a3bd008 100644 --- a/homeassistant/components/rituals_perfume_genie/number.py +++ b/homeassistant/components/rituals_perfume_genie/number.py @@ -17,21 +17,14 @@ from .coordinator import RitualsDataUpdateCoordinator from .entity import DiffuserEntity -@dataclass -class RitualsNumberEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) 
+class RitualsNumberEntityDescription(NumberEntityDescription): + """Class describing Rituals number entities.""" value_fn: Callable[[Diffuser], int] set_value_fn: Callable[[Diffuser, int], Awaitable[Any]] -@dataclass -class RitualsNumberEntityDescription( - NumberEntityDescription, RitualsNumberEntityDescriptionMixin -): - """Class describing Rituals number entities.""" - - ENTITY_DESCRIPTIONS = ( RitualsNumberEntityDescription( key="perfume_amount", diff --git a/homeassistant/components/rituals_perfume_genie/select.py b/homeassistant/components/rituals_perfume_genie/select.py index 42e18624d13..2126ecb147f 100644 --- a/homeassistant/components/rituals_perfume_genie/select.py +++ b/homeassistant/components/rituals_perfume_genie/select.py @@ -17,21 +17,14 @@ from .coordinator import RitualsDataUpdateCoordinator from .entity import DiffuserEntity -@dataclass -class RitualsEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class RitualsSelectEntityDescription(SelectEntityDescription): + """Class describing Rituals select entities.""" current_fn: Callable[[Diffuser], str] select_fn: Callable[[Diffuser, str], Awaitable[None]] -@dataclass -class RitualsSelectEntityDescription( - SelectEntityDescription, RitualsEntityDescriptionMixin -): - """Class describing Rituals select entities.""" - - ENTITY_DESCRIPTIONS = ( RitualsSelectEntityDescription( key="room_size_square_meter", diff --git a/homeassistant/components/rituals_perfume_genie/sensor.py b/homeassistant/components/rituals_perfume_genie/sensor.py index 09189dabfad..5f7ae45d330 100644 --- a/homeassistant/components/rituals_perfume_genie/sensor.py +++ b/homeassistant/components/rituals_perfume_genie/sensor.py @@ -21,20 +21,12 @@ from .coordinator import RitualsDataUpdateCoordinator from .entity import DiffuserEntity -@dataclass -class RitualsEntityDescriptionMixin: - """Mixin values for Rituals entities.""" - - value_fn: Callable[[Diffuser], int | str] - - -@dataclass -class 
RitualsSensorEntityDescription( - SensorEntityDescription, RitualsEntityDescriptionMixin -): +@dataclass(kw_only=True) +class RitualsSensorEntityDescription(SensorEntityDescription): """Class describing Rituals sensor entities.""" has_fn: Callable[[Diffuser], bool] = lambda _: True + value_fn: Callable[[Diffuser], int | str] ENTITY_DESCRIPTIONS = ( diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index b310b2bb2ba..ff49b352c18 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -2,17 +2,20 @@ from __future__ import annotations import asyncio +from collections.abc import Coroutine from datetime import timedelta import logging +from typing import Any +from roborock import RoborockException, RoborockInvalidCredentials from roborock.api import RoborockApiClient from roborock.cloud_api import RoborockMqttClient -from roborock.containers import DeviceData, HomeDataDevice, UserData +from roborock.containers import DeviceData, HomeDataDevice, HomeDataProduct, UserData from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .const import CONF_BASE_URL, CONF_USER_DATA, DOMAIN, PLATFORMS from .coordinator import RoborockDataUpdateCoordinator @@ -29,66 +32,113 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: user_data = UserData.from_dict(entry.data[CONF_USER_DATA]) api_client = RoborockApiClient(entry.data[CONF_USERNAME], entry.data[CONF_BASE_URL]) _LOGGER.debug("Getting home data") - home_data = await api_client.get_home_data(user_data) + try: + home_data = await api_client.get_home_data(user_data) + except RoborockInvalidCredentials as err: + raise ConfigEntryAuthFailed("Invalid credentials.") 
from err + except RoborockException as err: + raise ConfigEntryNotReady("Failed getting Roborock home_data.") from err _LOGGER.debug("Got home data %s", home_data) device_map: dict[str, HomeDataDevice] = { device.duid: device for device in home_data.devices + home_data.received_devices } - product_info = {product.id: product for product in home_data.products} - # Create a mqtt_client, which is needed to get the networking information of the device for local connection and in the future, get the map. - mqtt_clients = { - device.duid: RoborockMqttClient( - user_data, DeviceData(device, product_info[device.product_id].model) - ) - for device in device_map.values() + product_info: dict[str, HomeDataProduct] = { + product.id: product for product in home_data.products } - network_results = await asyncio.gather( - *(mqtt_client.get_networking() for mqtt_client in mqtt_clients.values()) - ) - network_info = { - device.duid: result - for device, result in zip(device_map.values(), network_results) - if result is not None - } - if not network_info: - raise ConfigEntryNotReady( - "Could not get network information about your devices" - ) - coordinator_map: dict[str, RoborockDataUpdateCoordinator] = {} - for device_id, device in device_map.items(): - coordinator_map[device_id] = RoborockDataUpdateCoordinator( - hass, - device, - network_info[device_id], - product_info[device.product_id], - mqtt_clients[device.duid], - ) - await asyncio.gather( - *(coordinator.verify_api() for coordinator in coordinator_map.values()) - ) - # If one device update fails - we still want to set up other devices - await asyncio.gather( - *( - coordinator.async_config_entry_first_refresh() - for coordinator in coordinator_map.values() - ), + # Get a Coordinator if the device is available or if we have connected to the device before + coordinators = await asyncio.gather( + *build_setup_functions(hass, device_map, user_data, product_info), return_exceptions=True, ) + # Valid coordinators are those where 
we had networking cached or we could get networking + valid_coordinators: list[RoborockDataUpdateCoordinator] = [ + coord + for coord in coordinators + if isinstance(coord, RoborockDataUpdateCoordinator) + ] + if len(valid_coordinators) == 0: + raise ConfigEntryNotReady("No coordinators were able to successfully setup.") hass.data.setdefault(DOMAIN, {})[entry.entry_id] = { - device_id: coordinator - for device_id, coordinator in coordinator_map.items() - if coordinator.last_update_success - } # Only add coordinators that succeeded - - if not hass.data[DOMAIN][entry.entry_id]: - # Don't start if no coordinators succeeded. - raise ConfigEntryNotReady("There are no devices that can currently be reached.") - + coordinator.roborock_device_info.device.duid: coordinator + for coordinator in valid_coordinators + } await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +def build_setup_functions( + hass: HomeAssistant, + device_map: dict[str, HomeDataDevice], + user_data: UserData, + product_info: dict[str, HomeDataProduct], +) -> list[Coroutine[Any, Any, RoborockDataUpdateCoordinator | None]]: + """Create a list of setup functions that can later be called asynchronously.""" + setup_functions = [] + for device in device_map.values(): + setup_functions.append( + setup_device(hass, user_data, device, product_info[device.product_id]) + ) + return setup_functions + + +async def setup_device( + hass: HomeAssistant, + user_data: UserData, + device: HomeDataDevice, + product_info: HomeDataProduct, +) -> RoborockDataUpdateCoordinator | None: + """Set up a device Coordinator.""" + mqtt_client = RoborockMqttClient(user_data, DeviceData(device, product_info.name)) + try: + networking = await mqtt_client.get_networking() + if networking is None: + # If the api does not return an error but does return None for + # get_networking - then we need to go through cache checking. 
+ raise RoborockException("Networking request returned None.") + except RoborockException as err: + _LOGGER.warning( + "Not setting up %s because we could not get the network information of the device. " + "Please confirm it is online and the Roborock servers can communicate with it", + device.name, + ) + _LOGGER.debug(err) + raise err + coordinator = RoborockDataUpdateCoordinator( + hass, device, networking, product_info, mqtt_client + ) + # Verify we can communicate locally - if we can't, switch to cloud api + await coordinator.verify_api() + coordinator.api.is_available = True + try: + await coordinator.async_config_entry_first_refresh() + except ConfigEntryNotReady: + if isinstance(coordinator.api, RoborockMqttClient): + _LOGGER.warning( + "Not setting up %s because the we failed to get data for the first time using the online client. " + "Please ensure your Home Assistant instance can communicate with this device. " + "You may need to open firewall instances on your Home Assistant network and on your Vacuum's network", + device.name, + ) + # Most of the time if we fail to connect using the mqtt client, the problem is due to firewall, + # but in case if it isn't, the error can be included in debug logs for the user to grab. 
+ if coordinator.last_exception: + _LOGGER.debug(coordinator.last_exception) + raise coordinator.last_exception + elif coordinator.last_exception: + # If this is reached, we have verified that we can communicate with the Vacuum locally, + # so if there is an error here - it is not a communication issue but some other problem + extra_error = f"Please create an issue with the following error included: {coordinator.last_exception}" + _LOGGER.warning( + "Not setting up %s because the coordinator failed to get data for the first time using the " + "offline client %s", + device.name, + extra_error, + ) + raise coordinator.last_exception + return coordinator + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Handle removal of an entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index fcfad6e8cd3..201631f0825 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Roborock.""" from __future__ import annotations +from collections.abc import Mapping import logging from typing import Any @@ -16,6 +17,7 @@ from roborock.exceptions import ( import voluptuous as vol from homeassistant import config_entries +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_USERNAME from homeassistant.data_entry_flow import FlowResult @@ -28,6 +30,7 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Roborock.""" VERSION = 1 + reauth_entry: ConfigEntry | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -47,21 +50,8 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): self._username = username _LOGGER.debug("Requesting code for Roborock account") self._client = 
RoborockApiClient(username) - try: - await self._client.request_code() - except RoborockAccountDoesNotExist: - errors["base"] = "invalid_email" - except RoborockUrlException: - errors["base"] = "unknown_url" - except RoborockInvalidEmail: - errors["base"] = "invalid_email_format" - except RoborockException as ex: - _LOGGER.exception(ex) - errors["base"] = "unknown_roborock" - except Exception as ex: # pylint: disable=broad-except - _LOGGER.exception(ex) - errors["base"] = "unknown" - else: + errors = await self._request_code() + if not errors: return await self.async_step_code() return self.async_show_form( step_id="user", @@ -69,6 +59,25 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): errors=errors, ) + async def _request_code(self) -> dict: + assert self._client + errors: dict[str, str] = {} + try: + await self._client.request_code() + except RoborockAccountDoesNotExist: + errors["base"] = "invalid_email" + except RoborockUrlException: + errors["base"] = "unknown_url" + except RoborockInvalidEmail: + errors["base"] = "invalid_email_format" + except RoborockException as ex: + _LOGGER.exception(ex) + errors["base"] = "unknown_roborock" + except Exception as ex: # pylint: disable=broad-except + _LOGGER.exception(ex) + errors["base"] = "unknown" + return errors + async def async_step_code( self, user_input: dict[str, Any] | None = None, @@ -91,6 +100,18 @@ class RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): _LOGGER.exception(ex) errors["base"] = "unknown" else: + if self.reauth_entry is not None: + self.hass.config_entries.async_update_entry( + self.reauth_entry, + data={ + **self.reauth_entry.data, + CONF_USER_DATA: login_data.as_dict(), + }, + ) + await self.hass.config_entries.async_reload( + self.reauth_entry.entry_id + ) + return self.async_abort(reason="reauth_successful") return self._create_entry(self._client, self._username, login_data) return self.async_show_form( @@ -99,6 +120,27 @@ class 
RoborockFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Perform reauth upon an API authentication error.""" + self._username = entry_data[CONF_USERNAME] + assert self._username + self._client = RoborockApiClient(self._username) + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Confirm reauth dialog.""" + errors: dict[str, str] = {} + if user_input is not None: + errors = await self._request_code() + if not errors: + return await self.async_step_code() + return self.async_show_form(step_id="reauth_confirm", errors=errors) + def _create_entry( self, client: RoborockApiClient, username: str, user_data: UserData ) -> FlowResult: diff --git a/homeassistant/components/roborock/const.py b/homeassistant/components/roborock/const.py index d135f323e90..d7a3a9229f5 100644 --- a/homeassistant/components/roborock/const.py +++ b/homeassistant/components/roborock/const.py @@ -1,4 +1,6 @@ """Constants for Roborock.""" +from vacuum_map_parser_base.config.drawable import Drawable + from homeassistant.const import Platform DOMAIN = "roborock" @@ -9,6 +11,7 @@ CONF_USER_DATA = "user_data" PLATFORMS = [ Platform.BUTTON, Platform.BINARY_SENSOR, + Platform.IMAGE, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, @@ -16,3 +19,13 @@ PLATFORMS = [ Platform.TIME, Platform.VACUUM, ] + +IMAGE_DRAWABLES: list[Drawable] = [ + Drawable.PATH, + Drawable.CHARGER, + Drawable.VACUUM_POSITION, +] + +IMAGE_CACHE_INTERVAL = 90 + +MAP_SLEEP = 3 diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 30bfc71ea48..cd08cf871d4 100644 --- a/homeassistant/components/roborock/coordinator.py +++ 
b/homeassistant/components/roborock/coordinator.py @@ -55,6 +55,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): model=self.roborock_device_info.product.model, sw_version=self.roborock_device_info.device.fv, ) + self.current_map: int | None = None if mac := self.roborock_device_info.network_info.mac: self.device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, mac)} @@ -91,6 +92,18 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): """Update data via library.""" try: await self._update_device_prop() + self._set_current_map() except RoborockException as ex: raise UpdateFailed(ex) from ex return self.roborock_device_info.props + + def _set_current_map(self) -> None: + if ( + self.roborock_device_info.props.status is not None + and self.roborock_device_info.props.status.map_status is not None + ): + # The map status represents the map flag as flag * 4 + 3 - + # so we have to invert that in order to get the map flag that we can use to set the current map. 
+ self.current_map = ( + self.roborock_device_info.props.status.map_status - 3 + ) // 4 diff --git a/homeassistant/components/roborock/device.py b/homeassistant/components/roborock/device.py index c8f45b40d82..71376dd600e 100644 --- a/homeassistant/components/roborock/device.py +++ b/homeassistant/components/roborock/device.py @@ -3,8 +3,9 @@ from typing import Any from roborock.api import AttributeCache, RoborockClient +from roborock.cloud_api import RoborockMqttClient from roborock.command_cache import CacheableAttribute -from roborock.containers import Status +from roborock.containers import Consumable, Status from roborock.exceptions import RoborockException from roborock.roborock_typing import RoborockCommand @@ -82,6 +83,11 @@ class RoborockCoordinatedEntity( data = self.coordinator.data return data.status + @property + def cloud_api(self) -> RoborockMqttClient: + """Return the cloud api.""" + return self.coordinator.cloud_api + async def send( self, command: RoborockCommand | str, @@ -91,3 +97,12 @@ class RoborockCoordinatedEntity( res = await super().send(command, params) await self.coordinator.async_refresh() return res + + def _update_from_listener(self, value: Status | Consumable): + """Update the status or consumable data from a listener and then write the new entity state.""" + if isinstance(value, Status): + self.coordinator.roborock_device_info.props.status = value + else: + self.coordinator.roborock_device_info.props.consumable = value + self.coordinator.data = self.coordinator.roborock_device_info.props + self.async_write_ha_state() diff --git a/homeassistant/components/roborock/image.py b/homeassistant/components/roborock/image.py new file mode 100644 index 00000000000..5e61bb1d408 --- /dev/null +++ b/homeassistant/components/roborock/image.py @@ -0,0 +1,164 @@ +"""Support for Roborock image.""" +import asyncio +import io +from itertools import chain + +from roborock import RoborockCommand +from vacuum_map_parser_base.config.color import 
ColorsPalette +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.config.size import Sizes +from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser + +from homeassistant.components.image import ImageEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify +import homeassistant.util.dt as dt_util + +from .const import DOMAIN, IMAGE_CACHE_INTERVAL, IMAGE_DRAWABLES, MAP_SLEEP +from .coordinator import RoborockDataUpdateCoordinator +from .device import RoborockCoordinatedEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Roborock image platform.""" + + coordinators: dict[str, RoborockDataUpdateCoordinator] = hass.data[DOMAIN][ + config_entry.entry_id + ] + entities = list( + chain.from_iterable( + await asyncio.gather( + *(create_coordinator_maps(coord) for coord in coordinators.values()) + ) + ) + ) + async_add_entities(entities) + + +class RoborockMap(RoborockCoordinatedEntity, ImageEntity): + """A class to let you visualize the map.""" + + _attr_has_entity_name = True + + def __init__( + self, + unique_id: str, + coordinator: RoborockDataUpdateCoordinator, + map_flag: int, + starting_map: bytes, + map_name: str, + ) -> None: + """Initialize a Roborock map.""" + RoborockCoordinatedEntity.__init__(self, unique_id, coordinator) + ImageEntity.__init__(self, coordinator.hass) + self._attr_name = map_name + self.parser = RoborockMapDataParser( + ColorsPalette(), Sizes(), IMAGE_DRAWABLES, ImageConfig(), [] + ) + self._attr_image_last_updated = dt_util.utcnow() + self.map_flag = map_flag + self.cached_map = self._create_image(starting_map) + 
+ @property + def entity_category(self) -> EntityCategory | None: + """Return diagnostic entity category for any non-selected maps.""" + if not self.is_selected: + return EntityCategory.DIAGNOSTIC + return None + + @property + def is_selected(self) -> bool: + """Return if this map is the currently selected map.""" + return self.map_flag == self.coordinator.current_map + + def is_map_valid(self) -> bool: + """Update this map if it is the current active map, and the vacuum is cleaning.""" + return ( + self.is_selected + and self.image_last_updated is not None + and self.coordinator.roborock_device_info.props.status is not None + and bool(self.coordinator.roborock_device_info.props.status.in_cleaning) + ) + + def _handle_coordinator_update(self): + # Bump last updated every third time the coordinator runs, so that async_image + # will be called and we will evaluate on the new coordinator data if we should + # update the cache. + if ( + dt_util.utcnow() - self.image_last_updated + ).total_seconds() > IMAGE_CACHE_INTERVAL and self.is_map_valid(): + self._attr_image_last_updated = dt_util.utcnow() + super()._handle_coordinator_update() + + async def async_image(self) -> bytes | None: + """Update the image if it is not cached.""" + if self.is_map_valid(): + map_data: bytes = await self.cloud_api.get_map_v1() + self.cached_map = self._create_image(map_data) + return self.cached_map + + def _create_image(self, map_bytes: bytes) -> bytes: + """Create an image using the map parser.""" + parsed_map = self.parser.parse(map_bytes) + if parsed_map.image is None: + raise HomeAssistantError("Something went wrong creating the map.") + img_byte_arr = io.BytesIO() + parsed_map.image.data.save(img_byte_arr, format="PNG") + return img_byte_arr.getvalue() + + +async def create_coordinator_maps( + coord: RoborockDataUpdateCoordinator, +) -> list[RoborockMap]: + """Get the starting map information for all maps for this device. The following steps must be done synchronously. 
+ + Only one map can be loaded at a time per device. + """ + entities = [] + maps = await coord.cloud_api.get_multi_maps_list() + if maps is not None and maps.map_info is not None: + cur_map = coord.current_map + # This won't be None at this point as the coordinator will have run first. + assert cur_map is not None + # Sort the maps so that we start with the current map and we can skip the + # load_multi_map call. + maps_info = sorted( + maps.map_info, key=lambda data: data.mapFlag == cur_map, reverse=True + ) + for roborock_map in maps_info: + # Load the map - so we can access it with get_map_v1 + if roborock_map.mapFlag != cur_map: + # Only change the map and sleep if we have multiple maps. + await coord.api.send_command( + RoborockCommand.LOAD_MULTI_MAP, [roborock_map.mapFlag] + ) + # We cannot get the map until the roborock servers fully process the + # map change. + await asyncio.sleep(MAP_SLEEP) + # Get the map data + api_data: bytes = await coord.cloud_api.get_map_v1() + entities.append( + RoborockMap( + f"{slugify(coord.roborock_device_info.device.duid)}_map_{roborock_map.name}", + coord, + roborock_map.mapFlag, + api_data, + roborock_map.name, + ) + ) + if len(maps.map_info) != 1: + # Set the map back to the map the user previously had selected so that it + # does not change the end user's app. + # Only needs to happen when we changed maps above. 
+ await coord.cloud_api.send_command( + RoborockCommand.LOAD_MULTI_MAP, [cur_map] + ) + return entities diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index ed043582a0e..beb467d69f9 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -6,5 +6,8 @@ "documentation": "https://www.home-assistant.io/integrations/roborock", "iot_class": "local_polling", "loggers": ["roborock"], - "requirements": ["python-roborock==0.36.1"] + "requirements": [ + "python-roborock==0.36.2", + "vacuum-map-parser-roborock==0.1.1" + ] } diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index f4968bf7db9..1a05f3ec9c1 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -3,6 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass from roborock.containers import Status +from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import RoborockCommand from homeassistant.components.select import SelectEntity, SelectEntityDescription @@ -37,6 +38,8 @@ class RoborockSelectDescription( ): """Class to describe an Roborock select entity.""" + protocol_listener: RoborockDataProtocol | None = None + SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ RoborockSelectDescription( @@ -49,6 +52,7 @@ SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ if data.water_box_mode is not None else None, parameter_lambda=lambda key, status: [status.get_mop_intensity_code(key)], + protocol_listener=RoborockDataProtocol.WATER_BOX_MODE, ), RoborockSelectDescription( key="mop_mode", @@ -105,6 +109,8 @@ class RoborockSelectEntity(RoborockCoordinatedEntity, SelectEntity): self.entity_description = entity_description super().__init__(unique_id, coordinator) self._attr_options = options + if (protocol := 
self.entity_description.protocol_listener) is not None: + self.api.add_listener(protocol, self._update_from_listener, self.api.cache) async def async_select_option(self, option: str) -> None: """Set the option.""" diff --git a/homeassistant/components/roborock/sensor.py b/homeassistant/components/roborock/sensor.py index 090ab2f233c..775fc0cfb5f 100644 --- a/homeassistant/components/roborock/sensor.py +++ b/homeassistant/components/roborock/sensor.py @@ -11,6 +11,7 @@ from roborock.containers import ( RoborockErrorCode, RoborockStateCode, ) +from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import DeviceProp from homeassistant.components.sensor import ( @@ -48,6 +49,8 @@ class RoborockSensorDescription( ): """A class that describes Roborock sensors.""" + protocol_listener: RoborockDataProtocol | None = None + def _dock_error_value_fn(properties: DeviceProp) -> str | None: if ( @@ -67,6 +70,7 @@ SENSOR_DESCRIPTIONS = [ translation_key="main_brush_time_left", value_fn=lambda data: data.consumable.main_brush_time_left, entity_category=EntityCategory.DIAGNOSTIC, + protocol_listener=RoborockDataProtocol.MAIN_BRUSH_WORK_TIME, ), RoborockSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -76,6 +80,7 @@ SENSOR_DESCRIPTIONS = [ translation_key="side_brush_time_left", value_fn=lambda data: data.consumable.side_brush_time_left, entity_category=EntityCategory.DIAGNOSTIC, + protocol_listener=RoborockDataProtocol.SIDE_BRUSH_WORK_TIME, ), RoborockSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -85,6 +90,7 @@ SENSOR_DESCRIPTIONS = [ translation_key="filter_time_left", value_fn=lambda data: data.consumable.filter_time_left, entity_category=EntityCategory.DIAGNOSTIC, + protocol_listener=RoborockDataProtocol.FILTER_WORK_TIME, ), RoborockSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -120,6 +126,7 @@ SENSOR_DESCRIPTIONS = [ value_fn=lambda data: data.status.state_name, 
entity_category=EntityCategory.DIAGNOSTIC, options=RoborockStateCode.keys(), + protocol_listener=RoborockDataProtocol.STATE, ), RoborockSensorDescription( key="cleaning_area", @@ -145,6 +152,7 @@ SENSOR_DESCRIPTIONS = [ value_fn=lambda data: data.status.error_code_name, entity_category=EntityCategory.DIAGNOSTIC, options=RoborockErrorCode.keys(), + protocol_listener=RoborockDataProtocol.ERROR_CODE, ), RoborockSensorDescription( key="battery", @@ -152,6 +160,7 @@ SENSOR_DESCRIPTIONS = [ entity_category=EntityCategory.DIAGNOSTIC, native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, + protocol_listener=RoborockDataProtocol.BATTERY, ), RoborockSensorDescription( key="last_clean_start", @@ -238,6 +247,8 @@ class RoborockSensorEntity(RoborockCoordinatedEntity, SensorEntity): """Initialize the entity.""" super().__init__(unique_id, coordinator) self.entity_description = description + if (protocol := self.entity_description.protocol_listener) is not None: + self.api.add_listener(protocol, self._update_from_listener, self.api.cache) @property def native_value(self) -> StateType | datetime.datetime: diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index 8841741d4a1..67660816de7 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -12,6 +12,10 @@ "data": { "code": "Verification code" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Roborock integration needs to re-authenticate your account" } }, "error": { @@ -23,7 +27,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git 
a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index 0edd8e3ec5a..c8b43e74efd 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -2,6 +2,7 @@ from typing import Any from roborock.code_mappings import RoborockStateCode +from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import RoborockCommand from homeassistant.components.vacuum import ( @@ -94,6 +95,12 @@ class RoborockVacuum(RoborockCoordinatedEntity, StateVacuumEntity): StateVacuumEntity.__init__(self) RoborockCoordinatedEntity.__init__(self, unique_id, coordinator) self._attr_fan_speed_list = self._device_status.fan_power_options + self.api.add_listener( + RoborockDataProtocol.FAN_POWER, self._update_from_listener, self.api.cache + ) + self.api.add_listener( + RoborockDataProtocol.STATE, self._update_from_listener, self.api.cache + ) @property def state(self) -> str | None: diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 818b43930f4..9eef366163e 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -6,6 +6,9 @@ "description": "Enter your Roku information.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Roku device to control." 
} }, "discovery_confirm": { diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index babdbc573bd..b1d118e6f75 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -1,8 +1,9 @@ """Support for monitoring an SABnzbd NZB client.""" from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine import logging +from typing import Any from pysabnzbd import SabnzbdApiException import voluptuous as vol @@ -189,7 +190,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: update_device_identifiers(hass, entry) @callback - def extract_api(func: Callable) -> Callable: + def extract_api( + func: Callable[[ServiceCall, SabnzbdApiData], Coroutine[Any, Any, None]] + ) -> Callable[[ServiceCall], Coroutine[Any, Any, None]]: """Define a decorator to get the correct api for a service call.""" async def wrapper(call: ServiceCall) -> None: diff --git a/homeassistant/components/scene/services.yaml b/homeassistant/components/scene/services.yaml index 543cefd5b9a..a2139529ccf 100644 --- a/homeassistant/components/scene/services.yaml +++ b/homeassistant/components/scene/services.yaml @@ -54,3 +54,9 @@ create: selector: entity: multiple: true + +delete: + target: + entity: + - integration: homeassistant + domain: scene diff --git a/homeassistant/components/scene/strings.json b/homeassistant/components/scene/strings.json index 3bfea1b09e7..af91b2e227e 100644 --- a/homeassistant/components/scene/strings.json +++ b/homeassistant/components/scene/strings.json @@ -46,6 +46,18 @@ "description": "List of entities to be included in the snapshot. By taking a snapshot, you record the current state of those entities. If you do not want to use the current state of all your entities for this scene, you can combine the `snapshot_entities` with `entities`." 
} } + }, + "delete": { + "name": "Delete", + "description": "Deletes a dynamically created scene." + } + }, + "exceptions": { + "entity_not_scene": { + "message": "{entity_id} is not a valid scene entity_id." + }, + "entity_not_dynamically_created": { + "message": "The scene {entity_id} is not created with service `scene.create`." } } } diff --git a/homeassistant/components/schlage/__init__.py b/homeassistant/components/schlage/__init__.py index feaa95864d5..96ff32d3e85 100644 --- a/homeassistant/components/schlage/__init__.py +++ b/homeassistant/components/schlage/__init__.py @@ -7,8 +7,9 @@ import pyschlage from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN, LOGGER +from .const import DOMAIN from .coordinator import SchlageDataUpdateCoordinator PLATFORMS: list[Platform] = [ @@ -26,8 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: auth = await hass.async_add_executor_job(pyschlage.Auth, username, password) except WarrantException as ex: - LOGGER.error("Schlage authentication failed: %s", ex) - return False + raise ConfigEntryAuthFailed from ex coordinator = SchlageDataUpdateCoordinator(hass, username, pyschlage.Schlage(auth)) hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator diff --git a/homeassistant/components/schlage/config_flow.py b/homeassistant/components/schlage/config_flow.py index 7e095466087..84bc3ef8ef6 100644 --- a/homeassistant/components/schlage/config_flow.py +++ b/homeassistant/components/schlage/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Schlage integration.""" from __future__ import annotations +from collections.abc import Mapping from typing import Any import pyschlage @@ -8,6 +9,7 @@ from pyschlage.exceptions import NotAuthorizedError import voluptuous as vol from homeassistant 
import config_entries +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.data_entry_flow import FlowResult @@ -16,6 +18,7 @@ from .const import DOMAIN, LOGGER STEP_USER_DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) +STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): @@ -23,36 +26,88 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 + reauth_entry: ConfigEntry | None = None + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} - if user_input is not None: - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] - try: - user_id = await self.hass.async_add_executor_job( - _authenticate, username, password - ) - except NotAuthorizedError: - errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except - LOGGER.exception("Unknown error") - errors["base"] = "unknown" - else: - await self.async_set_unique_id(user_id) - return self.async_create_entry(title=username, data=user_input) + if user_input is None: + return self._show_user_form({}) + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + user_id, errors = await self.hass.async_add_executor_job( + _authenticate, username, password + ) + if user_id is None: + return self._show_user_form(errors) + await self.async_set_unique_id(user_id) + return self.async_create_entry(title=username, data=user_input) + + def _show_user_form(self, errors: dict[str, str]) -> FlowResult: + """Show the user form.""" return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Handle reauth upon an API authentication 
error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() -def _authenticate(username: str, password: str) -> str: + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Dialog that informs the user that reauth is required.""" + assert self.reauth_entry is not None + if user_input is None: + return self._show_reauth_form({}) + + username = self.reauth_entry.data[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + user_id, errors = await self.hass.async_add_executor_job( + _authenticate, username, password + ) + if user_id is None: + return self._show_reauth_form(errors) + + if self.reauth_entry.unique_id != user_id: + return self.async_abort(reason="wrong_account") + + data = { + CONF_USERNAME: username, + CONF_PASSWORD: user_input[CONF_PASSWORD], + } + self.hass.config_entries.async_update_entry(self.reauth_entry, data=data) + await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") + + def _show_reauth_form(self, errors: dict[str, str]) -> FlowResult: + """Show the reauth form.""" + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_REAUTH_DATA_SCHEMA, + errors=errors, + ) + + +def _authenticate(username: str, password: str) -> tuple[str | None, dict[str, str]]: """Authenticate with the Schlage API.""" - auth = pyschlage.Auth(username, password) - auth.authenticate() - # The user_id property will make a blocking call if it's not already - # cached. To avoid blocking the event loop, we read it here. 
- return auth.user_id + user_id = None + errors: dict[str, str] = {} + try: + auth = pyschlage.Auth(username, password) + auth.authenticate() + except NotAuthorizedError: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + LOGGER.exception("Unknown error") + errors["base"] = "unknown" + else: + # The user_id property will make a blocking call if it's not already + # cached. To avoid blocking the event loop, we read it here. + user_id = auth.user_id + return user_id, errors diff --git a/homeassistant/components/schlage/coordinator.py b/homeassistant/components/schlage/coordinator.py index 2b1e8460af2..3d736306d91 100644 --- a/homeassistant/components/schlage/coordinator.py +++ b/homeassistant/components/schlage/coordinator.py @@ -5,10 +5,11 @@ import asyncio from dataclasses import dataclass from pyschlage import Lock, Schlage -from pyschlage.exceptions import Error as SchlageError +from pyschlage.exceptions import Error as SchlageError, NotAuthorizedError from pyschlage.log import LockLog from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, UPDATE_INTERVAL @@ -43,6 +44,8 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): """Fetch the latest data from the Schlage API.""" try: locks = await self.hass.async_add_executor_job(self.api.locks) + except NotAuthorizedError as ex: + raise ConfigEntryAuthFailed from ex except SchlageError as ex: raise UpdateFailed("Failed to refresh Schlage data") from ex lock_data = await asyncio.gather( @@ -64,6 +67,8 @@ class SchlageDataUpdateCoordinator(DataUpdateCoordinator[SchlageData]): logs = previous_lock_data.logs try: logs = lock.logs() + except NotAuthorizedError as ex: + raise ConfigEntryAuthFailed from ex except SchlageError as ex: LOGGER.debug('Failed to read logs for lock "%s": %s', lock.name, 
ex) diff --git a/homeassistant/components/schlage/strings.json b/homeassistant/components/schlage/strings.json index 076ed97e298..721d9e80286 100644 --- a/homeassistant/components/schlage/strings.json +++ b/homeassistant/components/schlage/strings.json @@ -6,6 +6,13 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Schlage integration needs to re-authenticate your account", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { @@ -13,7 +20,9 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "wrong_account": "The user credentials provided do not match this Schlage account." 
} }, "entity": { diff --git a/homeassistant/components/script/helpers.py b/homeassistant/components/script/helpers.py index 9f0d4399d3d..4504869e270 100644 --- a/homeassistant/components/script/helpers.py +++ b/homeassistant/components/script/helpers.py @@ -1,5 +1,6 @@ """Helpers for automation integration.""" from homeassistant.components.blueprint import DomainBlueprints +from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.singleton import singleton @@ -15,8 +16,15 @@ def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool: return len(scripts_with_blueprint(hass, blueprint_path)) > 0 +async def _reload_blueprint_scripts(hass: HomeAssistant, blueprint_path: str) -> None: + """Reload all script that rely on a specific blueprint.""" + await hass.services.async_call(DOMAIN, SERVICE_RELOAD) + + @singleton(DATA_BLUEPRINTS) @callback def async_get_blueprints(hass: HomeAssistant) -> DomainBlueprints: """Get script blueprints.""" - return DomainBlueprints(hass, DOMAIN, LOGGER, _blueprint_in_use) + return DomainBlueprints( + hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_scripts + ) diff --git a/homeassistant/components/sensibo/entity.py b/homeassistant/components/sensibo/entity.py index 9f20c051576..0a60fc4a85d 100644 --- a/homeassistant/components/sensibo/entity.py +++ b/homeassistant/components/sensibo/entity.py @@ -23,22 +23,24 @@ def async_handle_api_call( ) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]]: """Decorate api calls.""" - async def wrap_api_call(*args: Any, **kwargs: Any) -> None: + async def wrap_api_call(entity: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: """Wrap services for api calls.""" res: bool = False try: async with asyncio.timeout(TIMEOUT): - res = await function(*args, **kwargs) + res = await function(entity, *args, **kwargs) except SENSIBO_ERRORS as err: raise HomeAssistantError from err - LOGGER.debug("Result %s for entity 
%s with arguments %s", res, args[0], kwargs) - entity: SensiboDeviceBaseEntity = args[0] + LOGGER.debug("Result %s for entity %s with arguments %s", res, entity, kwargs) if res is not True: raise HomeAssistantError(f"Could not execute service for {entity.name}") - if kwargs.get("key") is not None and kwargs.get("value") is not None: - setattr(entity.device_data, kwargs["key"], kwargs["value"]) - LOGGER.debug("Debug check key %s is now %s", kwargs["key"], kwargs["value"]) + if ( + isinstance(key := kwargs.get("key"), str) + and (value := kwargs.get("value")) is not None + ): + setattr(entity.device_data, key, value) + LOGGER.debug("Debug check key %s is now %s", key, value) entity.async_write_ha_state() await entity.coordinator.async_request_refresh() diff --git a/homeassistant/components/sentry/manifest.json b/homeassistant/components/sentry/manifest.json index 3828a868649..2af110564e7 100644 --- a/homeassistant/components/sentry/manifest.json +++ b/homeassistant/components/sentry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sentry", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["sentry-sdk==1.34.0"] + "requirements": ["sentry-sdk==1.37.1"] } diff --git a/homeassistant/components/sfr_box/diagnostics.py b/homeassistant/components/sfr_box/diagnostics.py index 1fb98053267..e0e84a7ec1a 100644 --- a/homeassistant/components/sfr_box/diagnostics.py +++ b/homeassistant/components/sfr_box/diagnostics.py @@ -27,16 +27,28 @@ async def async_get_config_entry_diagnostics( }, "data": { "dsl": async_redact_data( - dataclasses.asdict(await data.system.box.dsl_get_info()), TO_REDACT + dataclasses.asdict( + await data.system.box.dsl_get_info() # type:ignore [call-overload] + ), + TO_REDACT, ), "ftth": async_redact_data( - dataclasses.asdict(await data.system.box.ftth_get_info()), TO_REDACT + dataclasses.asdict( + await data.system.box.ftth_get_info() # type:ignore [call-overload] + ), + TO_REDACT, ), 
"system": async_redact_data( - dataclasses.asdict(await data.system.box.system_get_info()), TO_REDACT + dataclasses.asdict( + await data.system.box.system_get_info() # type:ignore [call-overload] + ), + TO_REDACT, ), "wan": async_redact_data( - dataclasses.asdict(await data.system.box.wan_get_info()), TO_REDACT + dataclasses.asdict( + await data.system.box.wan_get_info() # type:ignore [call-overload] + ), + TO_REDACT, ), }, } diff --git a/homeassistant/components/sfr_box/manifest.json b/homeassistant/components/sfr_box/manifest.json index eb3c9cb1b68..bf4d91a50f1 100644 --- a/homeassistant/components/sfr_box/manifest.json +++ b/homeassistant/components/sfr_box/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sfr_box", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["sfrbox-api==0.0.6"] + "requirements": ["sfrbox-api==0.0.8"] } diff --git a/homeassistant/components/sfr_box/sensor.py b/homeassistant/components/sfr_box/sensor.py index 1c4540b1c74..f56a9765618 100644 --- a/homeassistant/components/sfr_box/sensor.py +++ b/homeassistant/components/sfr_box/sensor.py @@ -188,7 +188,7 @@ SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda x: x.temperature / 1000, + value_fn=lambda x: None if x.temperature is None else x.temperature / 1000, ), ) WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] 
= ( diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 5efc5c849d7..b29fdcc6d19 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -73,6 +73,7 @@ BLOCK_SLEEPING_PLATFORMS: Final = [ RPC_PLATFORMS: Final = [ Platform.BINARY_SENSOR, Platform.BUTTON, + Platform.CLIMATE, Platform.COVER, Platform.EVENT, Platform.LIGHT, diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index 35c18511860..6a592c904f6 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -37,9 +37,12 @@ from .const import ( DOMAIN, LOGGER, NOT_CALIBRATED_ISSUE_ID, + RPC_THERMOSTAT_SETTINGS, SHTRV_01_TEMPERATURE_SETTINGS, ) -from .coordinator import ShellyBlockCoordinator, get_entry_data +from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data +from .entity import ShellyRpcEntity +from .utils import async_remove_shelly_entity, get_device_entry_gen, get_rpc_key_ids async def async_setup_entry( @@ -48,6 +51,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up climate device.""" + if get_device_entry_gen(config_entry) == 2: + return async_setup_rpc_entry(hass, config_entry, async_add_entities) + coordinator = get_entry_data(hass)[config_entry.entry_id].block assert coordinator if coordinator.device.initialized: @@ -105,6 +111,33 @@ def async_restore_climate_entities( break +@callback +def async_setup_rpc_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up entities for RPC device.""" + coordinator = get_entry_data(hass)[config_entry.entry_id].rpc + assert coordinator + climate_key_ids = get_rpc_key_ids(coordinator.device.status, "thermostat") + + climate_ids = [] + for id_ in climate_key_ids: + climate_ids.append(id_) + + if 
coordinator.device.shelly.get("relay_in_thermostat", False): + # Wall Display relay is used as the thermostat actuator, + # we need to remove a switch entity + unique_id = f"{coordinator.mac}-switch:{id_}" + async_remove_shelly_entity(hass, "switch", unique_id) + + if not climate_ids: + return + + async_add_entities(RpcClimate(coordinator, id_) for id_ in climate_ids) + + @dataclass class ShellyClimateExtraStoredData(ExtraStoredData): """Object to hold extra stored data.""" @@ -381,3 +414,74 @@ class BlockSleepingClimate( self.coordinator.entry.async_start_reauth(self.hass) else: self.async_write_ha_state() + + +class RpcClimate(ShellyRpcEntity, ClimateEntity): + """Entity that controls a thermostat on RPC based Shelly devices.""" + + _attr_hvac_modes = [HVACMode.OFF] + _attr_icon = "mdi:thermostat" + _attr_max_temp = RPC_THERMOSTAT_SETTINGS["max"] + _attr_min_temp = RPC_THERMOSTAT_SETTINGS["min"] + _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE + _attr_target_temperature_step = RPC_THERMOSTAT_SETTINGS["step"] + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, coordinator: ShellyRpcCoordinator, id_: int) -> None: + """Initialize.""" + super().__init__(coordinator, f"thermostat:{id_}") + self._id = id_ + self._thermostat_type = coordinator.device.config[f"thermostat:{id_}"].get( + "type", "heating" + ) + if self._thermostat_type == "cooling": + self._attr_hvac_modes.append(HVACMode.COOL) + else: + self._attr_hvac_modes.append(HVACMode.HEAT) + + @property + def target_temperature(self) -> float | None: + """Set target temperature.""" + return cast(float, self.status["target_C"]) + + @property + def current_temperature(self) -> float | None: + """Return current temperature.""" + return cast(float, self.status["current_C"]) + + @property + def hvac_mode(self) -> HVACMode: + """HVAC current mode.""" + if not self.status["enable"]: + return HVACMode.OFF + + return HVACMode.COOL if self._thermostat_type == "cooling" else 
HVACMode.HEAT + + @property + def hvac_action(self) -> HVACAction: + """HVAC current action.""" + if not self.status["output"]: + return HVACAction.IDLE + + return ( + HVACAction.COOLING + if self._thermostat_type == "cooling" + else HVACAction.HEATING + ) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if (target_temp := kwargs.get(ATTR_TEMPERATURE)) is None: + return + + await self.call_rpc( + "Thermostat.SetConfig", + {"config": {"id": self._id, "target_C": target_temp}}, + ) + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set hvac mode.""" + mode = hvac_mode in (HVACMode.COOL, HVACMode.HEAT) + await self.call_rpc( + "Thermostat.SetConfig", {"config": {"id": self._id, "enable": mode}} + ) diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index bad13fde006..6cde265bc25 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -29,6 +29,7 @@ from .const import ( CONF_SLEEP_PERIOD, DOMAIN, LOGGER, + MODEL_WALL_DISPLAY, BLEScannerMode, ) from .coordinator import async_reconnect_soon, get_entry_data @@ -363,8 +364,10 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool: """Return options flow support for this handler.""" - return config_entry.data.get("gen") == 2 and not config_entry.data.get( - CONF_SLEEP_PERIOD + return ( + config_entry.data.get("gen") == 2 + and not config_entry.data.get(CONF_SLEEP_PERIOD) + and config_entry.data.get("model") != MODEL_WALL_DISPLAY ) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index 0275b805208..a90aba8db62 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -6,6 +6,22 @@ from logging import Logger, getLogger import re from typing import Final +from 
aioshelly.const import ( + MODEL_BULB, + MODEL_BULB_RGBW, + MODEL_BUTTON1, + MODEL_BUTTON1_V2, + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_DUO, + MODEL_GAS, + MODEL_MOTION, + MODEL_MOTION_2, + MODEL_RGBW2, + MODEL_VALVE, + MODEL_VINTAGE_V2, + MODEL_WALL_DISPLAY, +) from awesomeversion import AwesomeVersion DOMAIN: Final = "shelly" @@ -24,29 +40,29 @@ LIGHT_TRANSITION_MIN_FIRMWARE_DATE: Final = 20210226 MAX_TRANSITION_TIME: Final = 5000 RGBW_MODELS: Final = ( - "SHBLB-1", - "SHRGBW2", + MODEL_BULB, + MODEL_RGBW2, ) MODELS_SUPPORTING_LIGHT_TRANSITION: Final = ( - "SHBDUO-1", - "SHCB-1", - "SHDM-1", - "SHDM-2", - "SHRGBW2", - "SHVIN-1", + MODEL_DUO, + MODEL_BULB_RGBW, + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_RGBW2, + MODEL_VINTAGE_V2, ) MODELS_SUPPORTING_LIGHT_EFFECTS: Final = ( - "SHBLB-1", - "SHCB-1", - "SHRGBW2", + MODEL_BULB, + MODEL_BULB_RGBW, + MODEL_RGBW2, ) # Bulbs that support white & color modes DUAL_MODE_LIGHT_MODELS: Final = ( - "SHBLB-1", - "SHCB-1", + MODEL_BULB, + MODEL_BULB_RGBW, ) # Refresh interval for REST sensors @@ -79,7 +95,11 @@ INPUTS_EVENTS_DICT: Final = { } # List of battery devices that maintain a permanent WiFi connection -BATTERY_DEVICES_WITH_PERMANENT_CONNECTION: Final = ["SHMOS-01"] +BATTERY_DEVICES_WITH_PERMANENT_CONNECTION: Final = [ + MODEL_MOTION, + MODEL_MOTION_2, + MODEL_VALVE, +] # Button/Click events for Block & RPC devices EVENT_SHELLY_CLICK: Final = "shelly.click" @@ -124,7 +144,7 @@ INPUTS_EVENTS_SUBTYPES: Final = { "button4": 4, } -SHBTN_MODELS: Final = ["SHBTN-1", "SHBTN-2"] +SHBTN_MODELS: Final = [MODEL_BUTTON1, MODEL_BUTTON1_V2] STANDARD_RGB_EFFECTS: Final = { 0: "Off", @@ -149,6 +169,11 @@ SHTRV_01_TEMPERATURE_SETTINGS: Final = { "step": 0.5, "default": 20.0, } +RPC_THERMOSTAT_SETTINGS: Final = { + "min": 5, + "max": 35, + "step": 0.5, +} # Kelvin value for colorTemp KELVIN_MAX_VALUE: Final = 6500 @@ -160,7 +185,7 @@ UPTIME_DEVIATION: Final = 5 # Time to wait before reloading entry upon device config change 
ENTRY_RELOAD_COOLDOWN = 60 -SHELLY_GAS_MODELS = ["SHGS-1"] +SHELLY_GAS_MODELS = [MODEL_GAS] BLE_MIN_VERSION = AwesomeVersion("0.12.0-beta2") @@ -186,3 +211,12 @@ OTA_BEGIN = "ota_begin" OTA_ERROR = "ota_error" OTA_PROGRESS = "ota_progress" OTA_SUCCESS = "ota_success" + +GEN1_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen1/#changelog" +GEN2_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen2/changelog/" +DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( + MODEL_WALL_DISPLAY, + MODEL_MOTION, + MODEL_MOTION_2, + MODEL_VALVE, +) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index e648a80420a..d1f9d6943bf 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -10,6 +10,7 @@ from typing import Any, Generic, TypeVar, cast import aioshelly from aioshelly.ble import async_ensure_ble_enabled, async_stop_scanner from aioshelly.block_device import BlockDevice, BlockUpdateType +from aioshelly.const import MODEL_VALVE from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError from aioshelly.rpc_device import RpcDevice, RpcUpdateType from awesomeversion import AwesomeVersion @@ -219,7 +220,7 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): # Shelly TRV sends information about changing the configuration for no # reason, reloading the config entry is not needed for it. 
- if self.model == "SHTRV-01": + if self.model == MODEL_VALVE: self._last_cfg_changed = None # For dual mode bulbs ignore change if it is due to mode/effect change @@ -583,7 +584,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): ble_scanner_mode = self.entry.options.get( CONF_BLE_SCANNER_MODE, BLEScannerMode.DISABLED ) - if ble_scanner_mode == BLEScannerMode.DISABLED: + if ble_scanner_mode == BLEScannerMode.DISABLED and self.connected: await async_stop_scanner(self.device) return if AwesomeVersion(self.device.version) < BLE_MIN_VERSION: diff --git a/homeassistant/components/shelly/event.py b/homeassistant/components/shelly/event.py index 1b5cf911e85..af323c82a24 100644 --- a/homeassistant/components/shelly/event.py +++ b/homeassistant/components/shelly/event.py @@ -6,6 +6,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Final from aioshelly.block_device import Block +from aioshelly.const import MODEL_I3 from homeassistant.components.event import ( DOMAIN as EVENT_DOMAIN, @@ -135,7 +136,7 @@ class ShellyBlockEvent(ShellyBlockEntity, EventEntity): self.channel = channel = int(block.channel or 0) + 1 self._attr_unique_id = f"{super().unique_id}-{channel}" - if coordinator.model == "SHIX3-1": + if coordinator.model == MODEL_I3: self._attr_event_types = list(SHIX3_1_INPUTS_EVENTS_TYPES) else: self._attr_event_types = list(BASIC_INPUTS_EVENTS_TYPES) diff --git a/homeassistant/components/shelly/light.py b/homeassistant/components/shelly/light.py index 1c3a85f2f5e..829a60b3a9e 100644 --- a/homeassistant/components/shelly/light.py +++ b/homeassistant/components/shelly/light.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any, cast from aioshelly.block_device import Block +from aioshelly.const import MODEL_BULB from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -254,7 +255,7 @@ class BlockShellyLight(ShellyBlockEntity, LightEntity): @property def effect_list(self) -> list[str] | None: 
"""Return the list of supported effects.""" - if self.coordinator.model == "SHBLB-1": + if self.coordinator.model == MODEL_BULB: return list(SHBLB_1_RGB_EFFECTS.values()) return list(STANDARD_RGB_EFFECTS.values()) @@ -267,7 +268,7 @@ class BlockShellyLight(ShellyBlockEntity, LightEntity): else: effect_index = self.block.effect - if self.coordinator.model == "SHBLB-1": + if self.coordinator.model == MODEL_BULB: return SHBLB_1_RGB_EFFECTS[effect_index] return STANDARD_RGB_EFFECTS[effect_index] @@ -326,7 +327,7 @@ class BlockShellyLight(ShellyBlockEntity, LightEntity): if ATTR_EFFECT in kwargs and ATTR_COLOR_TEMP_KELVIN not in kwargs: # Color effect change - used only in color mode, switch device mode to color set_mode = "color" - if self.coordinator.model == "SHBLB-1": + if self.coordinator.model == MODEL_BULB: effect_dict = SHBLB_1_RGB_EFFECTS else: effect_dict = STANDARD_RGB_EFFECTS diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index c76e2102fa1..b8185712d31 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==6.0.0"], + "requirements": ["aioshelly==6.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index b12ad3e4823..9230ae605e0 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -6,6 +6,9 @@ "description": "Before setup, battery-powered devices must be woken up, you can now wake the device up using a button on it.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the Shelly device to connect to." 
} }, "credentials": { diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 395b386993a..5a398182e4d 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -5,13 +5,14 @@ from dataclasses import dataclass from typing import Any, cast from aioshelly.block_device import Block +from aioshelly.const import MODEL_2, MODEL_25, MODEL_GAS from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import GAS_VALVE_OPEN_STATES +from .const import GAS_VALVE_OPEN_STATES, MODEL_WALL_DISPLAY from .coordinator import ShellyBlockCoordinator, ShellyRpcCoordinator, get_entry_data from .entity import ( BlockEntityDescription, @@ -65,7 +66,7 @@ def async_setup_block_entry( assert coordinator # Add Shelly Gas Valve as a switch - if coordinator.model == "SHGS-1": + if coordinator.model == MODEL_GAS: async_setup_block_attribute_entities( hass, async_add_entities, @@ -77,7 +78,7 @@ def async_setup_block_entry( # In roller mode the relay blocks exist but do not contain required info if ( - coordinator.model in ["SHSW-21", "SHSW-25"] + coordinator.model in [MODEL_2, MODEL_25] and coordinator.device.settings["mode"] != "relay" ): return @@ -116,6 +117,15 @@ def async_setup_rpc_entry( if is_rpc_channel_type_light(coordinator.device.config, id_): continue + if coordinator.model == MODEL_WALL_DISPLAY: + if not coordinator.device.shelly.get("relay_in_thermostat", False): + # Wall Display relay is not used as the thermostat actuator, + # we need to remove a climate entity + unique_id = f"{coordinator.mac}-thermostat:{id_}" + async_remove_shelly_entity(hass, "climate", unique_id) + else: + continue + switch_ids.append(id_) unique_id = f"{coordinator.mac}-switch:{id_}" 
async_remove_shelly_entity(hass, "light", unique_id) diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index d4528f55288..9e52a292108 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -34,7 +34,7 @@ from .entity import ( async_setup_entry_rest, async_setup_entry_rpc, ) -from .utils import get_device_entry_gen +from .utils import get_device_entry_gen, get_release_url LOGGER = logging.getLogger(__name__) @@ -156,10 +156,15 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity): self, block_coordinator: ShellyBlockCoordinator, attribute: str, - description: RestEntityDescription, + description: RestUpdateDescription, ) -> None: """Initialize update entity.""" super().__init__(block_coordinator, attribute, description) + self._attr_release_url = get_release_url( + block_coordinator.device.gen, + block_coordinator.model, + description.beta, + ) self._in_progress_old_version: str | None = None @property @@ -225,11 +230,14 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): coordinator: ShellyRpcCoordinator, key: str, attribute: str, - description: RpcEntityDescription, + description: RpcUpdateDescription, ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) self._ota_in_progress: bool = False + self._attr_release_url = get_release_url( + coordinator.device.gen, coordinator.model, description.beta + ) async def async_added_to_hass(self) -> None: """When entity is added to hass.""" @@ -336,3 +344,15 @@ class RpcSleepingUpdateEntity( return None return self.last_state.attributes.get(ATTR_LATEST_VERSION) + + @property + def release_url(self) -> str | None: + """URL to the full release notes.""" + if not self.coordinator.device.initialized: + return None + + return get_release_url( + self.coordinator.device.gen, + self.coordinator.model, + self.entity_description.beta, + ) diff --git 
a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 4d25812361c..6b5c59f28db 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -6,7 +6,14 @@ from typing import Any, cast from aiohttp.web import Request, WebSocketResponse from aioshelly.block_device import COAP, Block, BlockDevice -from aioshelly.const import MODEL_NAMES +from aioshelly.const import ( + MODEL_1L, + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_EM3, + MODEL_I3, + MODEL_NAMES, +) from aioshelly.rpc_device import RpcDevice, WsServer from homeassistant.components.http import HomeAssistantView @@ -26,7 +33,10 @@ from .const import ( BASIC_INPUTS_EVENTS_TYPES, CONF_COAP_PORT, DEFAULT_COAP_PORT, + DEVICES_WITHOUT_FIRMWARE_CHANGELOG, DOMAIN, + GEN1_RELEASE_URL, + GEN2_RELEASE_URL, LOGGER, RPC_INPUTS_EVENTS_TYPES, SHBTN_INPUTS_EVENTS_TYPES, @@ -54,7 +64,11 @@ def get_number_of_channels(device: BlockDevice, block: Block) -> int: if block.type == "input": # Shelly Dimmer/1L has two input channels and missing "num_inputs" - if device.settings["device"]["type"] in ["SHDM-1", "SHDM-2", "SHSW-L"]: + if device.settings["device"]["type"] in [ + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_1L, + ]: channels = 2 else: channels = device.shelly.get("num_inputs") @@ -103,7 +117,7 @@ def get_block_channel_name(device: BlockDevice, block: Block | None) -> str: if channel_name: return channel_name - if device.settings["device"]["type"] == "SHEM-3": + if device.settings["device"]["type"] == MODEL_EM3: base = ord("A") else: base = ord("1") @@ -133,7 +147,7 @@ def is_block_momentary_input( return False # Shelly 1L has two button settings in the first channel - if settings["device"]["type"] == "SHSW-L": + if settings["device"]["type"] == MODEL_1L: channel = int(block.channel or 0) + 1 button_type = button[0].get("btn" + str(channel) + "_type") else: @@ -177,7 +191,7 @@ def get_block_input_triggers( if device.settings["device"]["type"] in 
SHBTN_MODELS: trigger_types = SHBTN_INPUTS_EVENTS_TYPES - elif device.settings["device"]["type"] == "SHIX3-1": + elif device.settings["device"]["type"] == MODEL_I3: trigger_types = SHIX3_1_INPUTS_EVENTS_TYPES else: trigger_types = BASIC_INPUTS_EVENTS_TYPES @@ -408,3 +422,11 @@ def mac_address_from_name(name: str) -> str | None: """Convert a name to a mac address.""" mac = name.partition(".")[0].partition("-")[-1] return mac.upper() if len(mac) == 12 else None + + +def get_release_url(gen: int, model: str, beta: bool) -> str | None: + """Return release URL or None.""" + if beta or model in DEVICES_WITHOUT_FIRMWARE_CHANGELOG: + return None + + return GEN1_RELEASE_URL if gen == 1 else GEN2_RELEASE_URL diff --git a/homeassistant/components/simplisafe/__init__.py b/homeassistant/components/simplisafe/__init__.py index 7b57fa1fc32..b1bd2c8e9d6 100644 --- a/homeassistant/components/simplisafe/__init__.py +++ b/homeassistant/components/simplisafe/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Iterable +from collections.abc import Callable, Coroutine, Iterable from datetime import timedelta from typing import Any, cast @@ -336,7 +336,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @callback - def extract_system(func: Callable) -> Callable: + def extract_system( + func: Callable[[ServiceCall, SystemType], Coroutine[Any, Any, None]] + ) -> Callable[[ServiceCall], Coroutine[Any, Any, None]]: """Define a decorator to get the correct system for a service call.""" async def wrapper(call: ServiceCall) -> None: diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index 52a02aca745..16558d2c795 100644 --- a/homeassistant/components/smartthings/climate.py +++ b/homeassistant/components/smartthings/climate.py @@ -13,6 +13,10 @@ from 
homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -71,6 +75,20 @@ STATE_TO_AC_MODE = { HVACMode.FAN_ONLY: "fanOnly", } +SWING_TO_FAN_OSCILLATION = { + SWING_BOTH: "all", + SWING_HORIZONTAL: "horizontal", + SWING_VERTICAL: "vertical", + SWING_OFF: "fixed", +} + +FAN_OSCILLATION_TO_SWING = { + value: key for key, value in SWING_TO_FAN_OSCILLATION.items() +} + + +WINDFREE = "windFree" + UNIT_MAP = {"C": UnitOfTemperature.CELSIUS, "F": UnitOfTemperature.FAHRENHEIT} _LOGGER = logging.getLogger(__name__) @@ -322,18 +340,34 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): """Define a SmartThings Air Conditioner.""" - _attr_supported_features = ( - ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE - ) + _hvac_modes: list[HVACMode] - def __init__(self, device): + def __init__(self, device) -> None: """Init the class.""" super().__init__(device) - self._hvac_modes = None + self._hvac_modes = [] + self._attr_preset_mode = None + self._attr_preset_modes = self._determine_preset_modes() + self._attr_swing_modes = self._determine_swing_modes() + self._attr_supported_features = self._determine_supported_features() + + def _determine_supported_features(self) -> ClimateEntityFeature: + features = ( + ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE + ) + if self._device.get_capability(Capability.fan_oscillation_mode): + features |= ClimateEntityFeature.SWING_MODE + if (self._attr_preset_modes is not None) and len(self._attr_preset_modes) > 0: + features |= ClimateEntityFeature.PRESET_MODE + return features async def async_set_fan_mode(self, fan_mode: str) -> None: """Set new target fan mode.""" await self._device.set_fan_mode(fan_mode, set_status=True) + + # setting the fan must 
reset the preset mode (it deactivates the windFree function) + self._attr_preset_mode = None + # State is set optimistically in the command above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() @@ -407,12 +441,12 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): self._hvac_modes = list(modes) @property - def current_temperature(self): + def current_temperature(self) -> float | None: """Return the current temperature.""" return self._device.status.temperature @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return device specific state attributes. Include attributes from the Demand Response Load Control (drlc) @@ -432,12 +466,12 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): return state_attributes @property - def fan_mode(self): + def fan_mode(self) -> str: """Return the fan setting.""" return self._device.status.fan_mode @property - def fan_modes(self): + def fan_modes(self) -> list[str]: """Return the list of available fan modes.""" return self._device.status.supported_ac_fan_modes @@ -454,11 +488,62 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): return self._hvac_modes @property - def target_temperature(self): + def target_temperature(self) -> float: """Return the temperature we try to reach.""" return self._device.status.cooling_setpoint @property - def temperature_unit(self): + def temperature_unit(self) -> str: """Return the unit of measurement.""" - return UNIT_MAP.get(self._device.status.attributes[Attribute.temperature].unit) + return UNIT_MAP[self._device.status.attributes[Attribute.temperature].unit] + + def _determine_swing_modes(self) -> list[str]: + """Return the list of available swing modes.""" + supported_modes = self._device.status.attributes[ + Attribute.supported_fan_oscillation_modes + ][0] + supported_swings = [ + FAN_OSCILLATION_TO_SWING.get(m, SWING_OFF) for m 
in supported_modes + ] + return supported_swings + + async def async_set_swing_mode(self, swing_mode: str) -> None: + """Set swing mode.""" + fan_oscillation_mode = SWING_TO_FAN_OSCILLATION[swing_mode] + await self._device.set_fan_oscillation_mode(fan_oscillation_mode) + + # setting the fan must reset the preset mode (it deactivates the windFree function) + self._attr_preset_mode = None + + self.async_schedule_update_ha_state(True) + + @property + def swing_mode(self) -> str: + """Return the swing setting.""" + return FAN_OSCILLATION_TO_SWING.get( + self._device.status.fan_oscillation_mode, SWING_OFF + ) + + def _determine_preset_modes(self) -> list[str] | None: + """Return a list of available preset modes.""" + supported_modes = self._device.status.attributes[ + "supportedAcOptionalMode" + ].value + if WINDFREE in supported_modes: + return [WINDFREE] + return None + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set special modes (currently only windFree is supported).""" + result = await self._device.command( + "main", + "custom.airConditionerOptionalMode", + "setAcOptionalMode", + [preset_mode], + ) + if result: + self._device.status.update_attribute_value("acOptionalMode", preset_mode) + + self._attr_preset_mode = preset_mode + + self.async_write_ha_state() diff --git a/homeassistant/components/smarttub/manifest.json b/homeassistant/components/smarttub/manifest.json index e8db096f31d..f2514063a40 100644 --- a/homeassistant/components/smarttub/manifest.json +++ b/homeassistant/components/smarttub/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["smarttub"], "quality_scale": "platinum", - "requirements": ["python-smarttub==0.0.35"] + "requirements": ["python-smarttub==0.0.36"] } diff --git a/homeassistant/components/smtp/notify.py b/homeassistant/components/smtp/notify.py index 6836a0b9f6b..6b960409305 100644 --- a/homeassistant/components/smtp/notify.py +++ b/homeassistant/components/smtp/notify.py @@ -185,9 +185,8 
@@ class MailNotificationService(BaseNotificationService): def send_message(self, message="", **kwargs): """Build and send a message to a user. - Will send plain text normally, or will build a multipart HTML message - with inline image attachments if images config is defined, or will - build a multipart HTML if html config is defined. + Will send plain text normally, with pictures as attachments if images config is + defined, or will build a multipart HTML if html config is defined. """ subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) @@ -242,8 +241,12 @@ def _build_text_msg(message): return MIMEText(message) -def _attach_file(atch_name, content_id): - """Create a message attachment.""" +def _attach_file(atch_name, content_id=""): + """Create a message attachment. + + If MIMEImage is successful and content_id is passed (HTML), add images in-line. + Otherwise add them as attachments. + """ try: with open(atch_name, "rb") as attachment_file: file_bytes = attachment_file.read() @@ -258,32 +261,34 @@ def _attach_file(atch_name, content_id): "Attachment %s has an unknown MIME type. 
Falling back to file", atch_name, ) - attachment = MIMEApplication(file_bytes, Name=atch_name) - attachment["Content-Disposition"] = f'attachment; filename="{atch_name}"' + attachment = MIMEApplication(file_bytes, Name=os.path.basename(atch_name)) + attachment[ + "Content-Disposition" + ] = f'attachment; filename="{os.path.basename(atch_name)}"' + else: + if content_id: + attachment.add_header("Content-ID", f"<{content_id}>") + else: + attachment.add_header( + "Content-Disposition", + f"attachment; filename={os.path.basename(atch_name)}", + ) - attachment.add_header("Content-ID", f"<{content_id}>") return attachment def _build_multipart_msg(message, images): - """Build Multipart message with in-line images.""" - _LOGGER.debug("Building multipart email with embedded attachment(s)") - msg = MIMEMultipart("related") - msg_alt = MIMEMultipart("alternative") - msg.attach(msg_alt) + """Build Multipart message with images as attachments.""" + _LOGGER.debug("Building multipart email with image attachment(s)") + msg = MIMEMultipart() body_txt = MIMEText(message) - msg_alt.attach(body_txt) - body_text = [f"

{message}


"] + msg.attach(body_txt) - for atch_num, atch_name in enumerate(images): - cid = f"image{atch_num}" - body_text.append(f'
') - attachment = _attach_file(atch_name, cid) + for atch_name in images: + attachment = _attach_file(atch_name) if attachment: msg.attach(attachment) - body_html = MIMEText("".join(body_text), "html") - msg_alt.attach(body_html) return msg diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 49caafcc774..27059bba180 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -280,9 +280,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): async def _async_fallback_poll(self) -> None: """Retrieve latest state by polling.""" - await self.hass.data[DATA_SONOS].favorites[ - self.speaker.household_id - ].async_poll() + await ( + self.hass.data[DATA_SONOS].favorites[self.speaker.household_id].async_poll() + ) await self.hass.async_add_executor_job(self._update) def _update(self) -> None: diff --git a/homeassistant/components/ssdp/__init__.py b/homeassistant/components/ssdp/__init__.py index ded663af897..a2df2c313cd 100644 --- a/homeassistant/components/ssdp/__init__.py +++ b/homeassistant/components/ssdp/__init__.py @@ -117,6 +117,7 @@ class SsdpServiceInfo(BaseServiceInfo): ssdp_ext: str | None = None ssdp_server: str | None = None ssdp_headers: Mapping[str, Any] = field(default_factory=dict) + ssdp_all_locations: set[str] = field(default_factory=set) x_homeassistant_matching_domains: set[str] = field(default_factory=set) @@ -283,6 +284,7 @@ class Scanner: self.hass = hass self._cancel_scan: Callable[[], None] | None = None self._ssdp_listeners: list[SsdpListener] = [] + self._device_tracker = SsdpDeviceTracker() self._callbacks: list[tuple[SsdpCallback, dict[str, str]]] = [] self._description_cache: DescriptionCache | None = None self.integration_matchers = integration_matchers @@ -290,21 +292,7 @@ class Scanner: @property def _ssdp_devices(self) -> list[SsdpDevice]: """Get all seen devices.""" - return [ - ssdp_device - for ssdp_listener 
in self._ssdp_listeners - for ssdp_device in ssdp_listener.devices.values() - ] - - @property - def _all_headers_from_ssdp_devices( - self, - ) -> dict[tuple[str, str], CaseInsensitiveDict]: - return { - (ssdp_device.udn, dst): headers - for ssdp_device in self._ssdp_devices - for dst, headers in ssdp_device.all_combined_headers.items() - } + return list(self._device_tracker.devices.values()) async def async_register_callback( self, callback: SsdpCallback, match_dict: None | dict[str, str] = None @@ -317,13 +305,16 @@ class Scanner: # Make sure any entries that happened # before the callback was registered are fired - for headers in self._all_headers_from_ssdp_devices.values(): - if _async_headers_match(headers, lower_match_dict): - await _async_process_callbacks( - [callback], - await self._async_headers_to_discovery_info(headers), - SsdpChange.ALIVE, - ) + for ssdp_device in self._ssdp_devices: + for headers in ssdp_device.all_combined_headers.values(): + if _async_headers_match(headers, lower_match_dict): + await _async_process_callbacks( + [callback], + await self._async_headers_to_discovery_info( + ssdp_device, headers + ), + SsdpChange.ALIVE, + ) callback_entry = (callback, lower_match_dict) self._callbacks.append(callback_entry) @@ -386,7 +377,6 @@ class Scanner: async def _async_start_ssdp_listeners(self) -> None: """Start the SSDP Listeners.""" # Devices are shared between all sources. - device_tracker = SsdpDeviceTracker() for source_ip in await async_build_source_set(self.hass): source_ip_str = str(source_ip) if source_ip.version == 6: @@ -405,7 +395,7 @@ class Scanner: callback=self._ssdp_listener_callback, source=source, target=target, - device_tracker=device_tracker, + device_tracker=self._device_tracker, ) ) results = await asyncio.gather( @@ -454,14 +444,16 @@ class Scanner: if info_desc is None: # Fetch info desc in separate task and process from there. 
self.hass.async_create_task( - self._ssdp_listener_process_with_lookup(ssdp_device, dst, source) + self._ssdp_listener_process_callback_with_lookup( + ssdp_device, dst, source + ) ) return # Info desc known, process directly. - self._ssdp_listener_process(ssdp_device, dst, source, info_desc) + self._ssdp_listener_process_callback(ssdp_device, dst, source, info_desc) - async def _ssdp_listener_process_with_lookup( + async def _ssdp_listener_process_callback_with_lookup( self, ssdp_device: SsdpDevice, dst: DeviceOrServiceType, @@ -469,14 +461,14 @@ class Scanner: ) -> None: """Handle a device/service change.""" location = ssdp_device.location - self._ssdp_listener_process( + self._ssdp_listener_process_callback( ssdp_device, dst, source, await self._async_get_description_dict(location), ) - def _ssdp_listener_process( + def _ssdp_listener_process_callback( self, ssdp_device: SsdpDevice, dst: DeviceOrServiceType, @@ -502,7 +494,7 @@ class Scanner: return discovery_info = discovery_info_from_headers_and_description( - combined_headers, info_desc + ssdp_device, combined_headers, info_desc ) discovery_info.x_homeassistant_matching_domains = matching_domains @@ -557,7 +549,7 @@ class Scanner: return await self._description_cache.async_get_description_dict(location) or {} async def _async_headers_to_discovery_info( - self, headers: CaseInsensitiveDict + self, ssdp_device: SsdpDevice, headers: CaseInsensitiveDict ) -> SsdpServiceInfo: """Combine the headers and description into discovery_info. 
@@ -567,34 +559,42 @@ class Scanner: location = headers["location"] info_desc = await self._async_get_description_dict(location) - return discovery_info_from_headers_and_description(headers, info_desc) + return discovery_info_from_headers_and_description( + ssdp_device, headers, info_desc + ) async def async_get_discovery_info_by_udn_st( self, udn: str, st: str ) -> SsdpServiceInfo | None: """Return discovery_info for a udn and st.""" - if headers := self._all_headers_from_ssdp_devices.get((udn, st)): - return await self._async_headers_to_discovery_info(headers) + for ssdp_device in self._ssdp_devices: + if ssdp_device.udn == udn: + if headers := ssdp_device.combined_headers(st): + return await self._async_headers_to_discovery_info( + ssdp_device, headers + ) return None async def async_get_discovery_info_by_st(self, st: str) -> list[SsdpServiceInfo]: """Return matching discovery_infos for a st.""" return [ - await self._async_headers_to_discovery_info(headers) - for udn_st, headers in self._all_headers_from_ssdp_devices.items() - if udn_st[1] == st + await self._async_headers_to_discovery_info(ssdp_device, headers) + for ssdp_device in self._ssdp_devices + if (headers := ssdp_device.combined_headers(st)) ] async def async_get_discovery_info_by_udn(self, udn: str) -> list[SsdpServiceInfo]: """Return matching discovery_infos for a udn.""" return [ - await self._async_headers_to_discovery_info(headers) - for udn_st, headers in self._all_headers_from_ssdp_devices.items() - if udn_st[0] == udn + await self._async_headers_to_discovery_info(ssdp_device, headers) + for ssdp_device in self._ssdp_devices + for headers in ssdp_device.all_combined_headers.values() + if ssdp_device.udn == udn ] def discovery_info_from_headers_and_description( + ssdp_device: SsdpDevice, combined_headers: CaseInsensitiveDict, info_desc: Mapping[str, Any], ) -> SsdpServiceInfo: @@ -627,6 +627,7 @@ def discovery_info_from_headers_and_description( ssdp_nt=combined_headers.get_lower("nt"), 
ssdp_headers=combined_headers, upnp=upnp_info, + ssdp_all_locations=set(ssdp_device.locations), ) diff --git a/homeassistant/components/stream/recorder.py b/homeassistant/components/stream/recorder.py index a334171abb8..a3441eb76da 100644 --- a/homeassistant/components/stream/recorder.py +++ b/homeassistant/components/stream/recorder.py @@ -78,7 +78,9 @@ class RecorderOutput(StreamOutput): def write_segment(segment: Segment) -> None: """Write a segment to output.""" + # fmt: off nonlocal output, output_v, output_a, last_stream_id, running_duration, last_sequence + # fmt: on # Because the stream_worker is in a different thread from the record service, # the lookback segments may still have some overlap with the recorder segments if segment.sequence <= last_sequence: diff --git a/homeassistant/components/stt/manifest.json b/homeassistant/components/stt/manifest.json index 53bb7fa1937..265c3363e2b 100644 --- a/homeassistant/components/stt/manifest.json +++ b/homeassistant/components/stt/manifest.json @@ -1,7 +1,7 @@ { "domain": "stt", "name": "Speech-to-text (STT)", - "codeowners": ["@home-assistant/core", "@pvizeli"], + "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/stt", "integration_type": "entity", diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 15c346fadab..3da91c4aa52 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuez==0.1.19"] + "requirements": ["pysuez==0.2.0"] } diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 43075276be6..d0c1bba211e 100644 --- a/homeassistant/components/suez_water/sensor.py +++ 
b/homeassistant/components/suez_water/sensor.py @@ -45,7 +45,7 @@ def setup_platform( password = config[CONF_PASSWORD] counter_id = config[CONF_COUNTER_ID] try: - client = SuezClient(username, password, counter_id) + client = SuezClient(username, password, counter_id, provider=None) if not client.check_credentials(): _LOGGER.warning("Wrong username and/or password") diff --git a/homeassistant/components/surepetcare/config_flow.py b/homeassistant/components/surepetcare/config_flow.py index 7c4509259ad..38bed2e20a9 100644 --- a/homeassistant/components/surepetcare/config_flow.py +++ b/homeassistant/components/surepetcare/config_flow.py @@ -118,6 +118,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="reauth_confirm", + description_placeholders={"username": self._username}, data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), errors=errors, ) diff --git a/homeassistant/components/surepetcare/strings.json b/homeassistant/components/surepetcare/strings.json index 2d297cc829e..c3b7864f36a 100644 --- a/homeassistant/components/surepetcare/strings.json +++ b/homeassistant/components/surepetcare/strings.json @@ -6,6 +6,13 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "Re-authenticate by entering password for {username}", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { diff --git a/homeassistant/components/synology_dsm/camera.py b/homeassistant/components/synology_dsm/camera.py index b76699631cb..a2f08202319 100644 --- a/homeassistant/components/synology_dsm/camera.py +++ b/homeassistant/components/synology_dsm/camera.py @@ -153,7 +153,9 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C if not self.available: return None try: - return await 
self._api.surveillance_station.get_camera_image(self.entity_description.key, self.snapshot_quality) # type: ignore[no-any-return] + return await self._api.surveillance_station.get_camera_image( # type: ignore[no-any-return] + self.entity_description.key, self.snapshot_quality + ) except ( SynologyDSMAPIErrorException, SynologyDSMRequestException, diff --git a/homeassistant/components/system_bridge/manifest.json b/homeassistant/components/system_bridge/manifest.json index 1bc00aee4f5..17c43fa4d24 100644 --- a/homeassistant/components/system_bridge/manifest.json +++ b/homeassistant/components/system_bridge/manifest.json @@ -10,6 +10,6 @@ "iot_class": "local_push", "loggers": ["systembridgeconnector"], "quality_scale": "silver", - "requirements": ["systembridgeconnector==3.9.5"], + "requirements": ["systembridgeconnector==3.10.0"], "zeroconf": ["_system-bridge._tcp.local."] } diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index e82083f73ec..59b0fa995e4 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -118,10 +118,19 @@ async def async_scan_tag( if DOMAIN not in hass.config.components: raise HomeAssistantError("tag component has not been set up.") - hass.bus.async_fire( - EVENT_TAG_SCANNED, {TAG_ID: tag_id, DEVICE_ID: device_id}, context=context - ) helper = hass.data[DOMAIN][TAGS] + + # Get name from helper, default value None if not present in data + tag_name = None + if tag_data := helper.data.get(tag_id): + tag_name = tag_data.get(CONF_NAME) + + hass.bus.async_fire( + EVENT_TAG_SCANNED, + {TAG_ID: tag_id, CONF_NAME: tag_name, DEVICE_ID: device_id}, + context=context, + ) + if tag_id in helper.data: await helper.async_update_item(tag_id, {LAST_SCANNED: dt_util.utcnow()}) else: diff --git a/homeassistant/components/tailscale/binary_sensor.py b/homeassistant/components/tailscale/binary_sensor.py index ecc561f0355..ee1c682c559 100644 --- 
a/homeassistant/components/tailscale/binary_sensor.py +++ b/homeassistant/components/tailscale/binary_sensor.py @@ -20,20 +20,13 @@ from . import TailscaleEntity from .const import DOMAIN -@dataclass -class TailscaleBinarySensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class TailscaleBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes a Tailscale binary sensor entity.""" is_on_fn: Callable[[TailscaleDevice], bool | None] -@dataclass -class TailscaleBinarySensorEntityDescription( - BinarySensorEntityDescription, TailscaleBinarySensorEntityDescriptionMixin -): - """Describes a Tailscale binary sensor entity.""" - - BINARY_SENSORS: tuple[TailscaleBinarySensorEntityDescription, ...] = ( TailscaleBinarySensorEntityDescription( key="update_available", diff --git a/homeassistant/components/tailscale/sensor.py b/homeassistant/components/tailscale/sensor.py index 75dca4ed840..f5850848c8c 100644 --- a/homeassistant/components/tailscale/sensor.py +++ b/homeassistant/components/tailscale/sensor.py @@ -21,20 +21,13 @@ from . import TailscaleEntity from .const import DOMAIN -@dataclass -class TailscaleSensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class TailscaleSensorEntityDescription(SensorEntityDescription): + """Describes a Tailscale sensor entity.""" value_fn: Callable[[TailscaleDevice], datetime | str | None] -@dataclass -class TailscaleSensorEntityDescription( - SensorEntityDescription, TailscaleSensorEntityDescriptionMixin -): - """Describes a Tailscale sensor entity.""" - - SENSORS: tuple[TailscaleSensorEntityDescription, ...] 
= ( TailscaleSensorEntityDescription( key="expires", diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 76677c3813e..7d150e95977 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -786,6 +786,7 @@ class TelegramNotificationService: photo=file_content, caption=kwargs.get(ATTR_CAPTION), disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], parse_mode=params[ATTR_PARSER], @@ -799,6 +800,7 @@ class TelegramNotificationService: chat_id=chat_id, sticker=file_content, disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], ) @@ -812,6 +814,7 @@ class TelegramNotificationService: video=file_content, caption=kwargs.get(ATTR_CAPTION), disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], parse_mode=params[ATTR_PARSER], @@ -825,6 +828,7 @@ class TelegramNotificationService: document=file_content, caption=kwargs.get(ATTR_CAPTION), disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], parse_mode=params[ATTR_PARSER], @@ -838,6 +842,7 @@ class TelegramNotificationService: voice=file_content, caption=kwargs.get(ATTR_CAPTION), disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], ) @@ -850,6 +855,7 @@ class TelegramNotificationService: animation=file_content, caption=kwargs.get(ATTR_CAPTION), disable_notification=params[ATTR_DISABLE_NOTIF], + 
reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], parse_mode=params[ATTR_PARSER], @@ -872,6 +878,7 @@ class TelegramNotificationService: chat_id=chat_id, sticker=stickerid, disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], reply_markup=params[ATTR_REPLYMARKUP], timeout=params[ATTR_TIMEOUT], ) @@ -895,6 +902,7 @@ class TelegramNotificationService: latitude=latitude, longitude=longitude, disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], timeout=params[ATTR_TIMEOUT], ) @@ -923,6 +931,7 @@ class TelegramNotificationService: allows_multiple_answers=allows_multiple_answers, open_period=openperiod, disable_notification=params[ATTR_DISABLE_NOTIF], + reply_to_message_id=params[ATTR_REPLY_TO_MSGID], timeout=params[ATTR_TIMEOUT], ) diff --git a/homeassistant/components/template/__init__.py b/homeassistant/components/template/__init__.py index 22919ac9e70..d52dc0cf166 100644 --- a/homeassistant/components/template/__init__.py +++ b/homeassistant/components/template/__init__.py @@ -34,8 +34,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: _LOGGER.error(err) return - conf = await conf_util.async_process_component_config( - hass, unprocessed_conf, await async_get_integration(hass, DOMAIN) + integration = await async_get_integration(hass, DOMAIN) + conf = await conf_util.async_process_component_and_handle_errors( + hass, unprocessed_conf, integration ) if conf is None: diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index 3329f185f08..9da43082d2b 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -10,10 +10,13 @@ from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from 
homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN -from homeassistant.config import async_log_exception, config_without_domain +from homeassistant.config import async_log_schema_error, config_without_domain from homeassistant.const import CONF_BINARY_SENSORS, CONF_SENSORS, CONF_UNIQUE_ID +from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.trigger import async_validate_trigger_config +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_notify_setup_error from . import ( binary_sensor as binary_sensor_platform, @@ -64,7 +67,7 @@ CONFIG_SECTION_SCHEMA = vol.Schema( ) -async def async_validate_config(hass, config): +async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> ConfigType: """Validate config.""" if DOMAIN not in config: return config @@ -80,7 +83,8 @@ async def async_validate_config(hass, config): hass, cfg[CONF_TRIGGER] ) except vol.Invalid as err: - async_log_exception(err, DOMAIN, cfg, hass) + async_log_schema_error(err, DOMAIN, cfg, hass) + async_notify_setup_error(hass, DOMAIN) continue legacy_warn_printed = False diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index d39fa56775a..8aeede42552 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -282,15 +282,6 @@ class TemplateFan(TemplateEntity, FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset_mode of the fan.""" - if self.preset_modes and preset_mode not in self.preset_modes: - _LOGGER.error( - "Received invalid preset_mode: %s for entity %s. 
Expected: %s", - preset_mode, - self.entity_id, - self.preset_modes, - ) - return - self._preset_mode = preset_mode if self._set_preset_mode_script: diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index b3f276240b5..89c4826f1e6 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -11,6 +11,9 @@ from homeassistant.components.light import ( ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, ATTR_TRANSITION, ENTITY_ID_FORMAT, ColorMode, @@ -46,8 +49,18 @@ from .template_entity import ( _LOGGER = logging.getLogger(__name__) _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] +# Legacy CONF_COLOR_ACTION = "set_color" CONF_COLOR_TEMPLATE = "color_template" + +CONF_HS_ACTION = "set_hs" +CONF_HS_TEMPLATE = "hs_template" +CONF_RGB_ACTION = "set_rgb" +CONF_RGB_TEMPLATE = "rgb_template" +CONF_RGBW_ACTION = "set_rgbw" +CONF_RGBW_TEMPLATE = "rgbw_template" +CONF_RGBWW_ACTION = "set_rgbww" +CONF_RGBWW_TEMPLATE = "rgbww_template" CONF_EFFECT_ACTION = "set_effect" CONF_EFFECT_LIST_TEMPLATE = "effect_list_template" CONF_EFFECT_TEMPLATE = "effect_template" @@ -67,8 +80,16 @@ LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( { - vol.Optional(CONF_COLOR_ACTION): cv.SCRIPT_SCHEMA, - vol.Optional(CONF_COLOR_TEMPLATE): cv.template, + vol.Exclusive(CONF_COLOR_ACTION, "hs_legacy_action"): cv.SCRIPT_SCHEMA, + vol.Exclusive(CONF_COLOR_TEMPLATE, "hs_legacy_template"): cv.template, + vol.Exclusive(CONF_HS_ACTION, "hs_legacy_action"): cv.SCRIPT_SCHEMA, + vol.Exclusive(CONF_HS_TEMPLATE, "hs_legacy_template"): cv.template, + vol.Optional(CONF_RGB_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGB_TEMPLATE): cv.template, + vol.Optional(CONF_RGBW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBW_TEMPLATE): cv.template, + vol.Optional(CONF_RGBWW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBWW_TEMPLATE): cv.template, 
vol.Inclusive(CONF_EFFECT_ACTION, "effect"): cv.SCRIPT_SCHEMA, vol.Inclusive(CONF_EFFECT_LIST_TEMPLATE, "effect"): cv.template, vol.Inclusive(CONF_EFFECT_TEMPLATE, "effect"): cv.template, @@ -166,6 +187,22 @@ class LightTemplate(TemplateEntity, LightEntity): if (color_action := config.get(CONF_COLOR_ACTION)) is not None: self._color_script = Script(hass, color_action, friendly_name, DOMAIN) self._color_template = config.get(CONF_COLOR_TEMPLATE) + self._hs_script = None + if (hs_action := config.get(CONF_HS_ACTION)) is not None: + self._hs_script = Script(hass, hs_action, friendly_name, DOMAIN) + self._hs_template = config.get(CONF_HS_TEMPLATE) + self._rgb_script = None + if (rgb_action := config.get(CONF_RGB_ACTION)) is not None: + self._rgb_script = Script(hass, rgb_action, friendly_name, DOMAIN) + self._rgb_template = config.get(CONF_RGB_TEMPLATE) + self._rgbw_script = None + if (rgbw_action := config.get(CONF_RGBW_ACTION)) is not None: + self._rgbw_script = Script(hass, rgbw_action, friendly_name, DOMAIN) + self._rgbw_template = config.get(CONF_RGBW_TEMPLATE) + self._rgbww_script = None + if (rgbww_action := config.get(CONF_RGBWW_ACTION)) is not None: + self._rgbww_script = Script(hass, rgbww_action, friendly_name, DOMAIN) + self._rgbww_template = config.get(CONF_RGBWW_TEMPLATE) self._effect_script = None if (effect_action := config.get(CONF_EFFECT_ACTION)) is not None: self._effect_script = Script(hass, effect_action, friendly_name, DOMAIN) @@ -178,24 +215,39 @@ class LightTemplate(TemplateEntity, LightEntity): self._state = False self._brightness = None self._temperature = None - self._color = None + self._hs_color = None + self._rgb_color = None + self._rgbw_color = None + self._rgbww_color = None self._effect = None self._effect_list = None - self._fixed_color_mode = None + self._color_mode = None self._max_mireds = None self._min_mireds = None self._supports_transition = False + self._supported_color_modes = None color_modes = {ColorMode.ONOFF} if 
self._level_script is not None: color_modes.add(ColorMode.BRIGHTNESS) if self._temperature_script is not None: color_modes.add(ColorMode.COLOR_TEMP) + if self._hs_script is not None: + color_modes.add(ColorMode.HS) if self._color_script is not None: color_modes.add(ColorMode.HS) + if self._rgb_script is not None: + color_modes.add(ColorMode.RGB) + if self._rgbw_script is not None: + color_modes.add(ColorMode.RGBW) + if self._rgbww_script is not None: + color_modes.add(ColorMode.RGBWW) + self._supported_color_modes = filter_supported_color_modes(color_modes) + if len(self._supported_color_modes) > 1: + self._color_mode = ColorMode.UNKNOWN if len(self._supported_color_modes) == 1: - self._fixed_color_mode = next(iter(self._supported_color_modes)) + self._color_mode = next(iter(self._supported_color_modes)) self._attr_supported_features = LightEntityFeature(0) if self._effect_script is not None: @@ -232,7 +284,22 @@ class LightTemplate(TemplateEntity, LightEntity): @property def hs_color(self) -> tuple[float, float] | None: """Return the hue and saturation color value [float, float].""" - return self._color + return self._hs_color + + @property + def rgb_color(self) -> tuple[int, int, int] | None: + """Return the rgb color value.""" + return self._rgb_color + + @property + def rgbw_color(self) -> tuple[int, int, int, int] | None: + """Return the rgbw color value.""" + return self._rgbw_color + + @property + def rgbww_color(self) -> tuple[int, int, int, int, int] | None: + """Return the rgbww color value.""" + return self._rgbww_color @property def effect(self) -> str | None: @@ -247,12 +314,7 @@ class LightTemplate(TemplateEntity, LightEntity): @property def color_mode(self): """Return current color mode.""" - if self._fixed_color_mode: - return self._fixed_color_mode - # Support for ct + hs, prioritize hs - if self._color is not None: - return ColorMode.HS - return ColorMode.COLOR_TEMP + return self._color_mode @property def supported_color_modes(self): @@ -305,10 
+367,42 @@ class LightTemplate(TemplateEntity, LightEntity): ) if self._color_template: self.add_template_attribute( - "_color", + "_hs_color", self._color_template, None, - self._update_color, + self._update_hs, + none_on_template_error=True, + ) + if self._hs_template: + self.add_template_attribute( + "_hs_color", + self._hs_template, + None, + self._update_hs, + none_on_template_error=True, + ) + if self._rgb_template: + self.add_template_attribute( + "_rgb_color", + self._rgb_template, + None, + self._update_rgb, + none_on_template_error=True, + ) + if self._rgbw_template: + self.add_template_attribute( + "_rgbw_color", + self._rgbw_template, + None, + self._update_rgbw, + none_on_template_error=True, + ) + if self._rgbww_template: + self.add_template_attribute( + "_rgbww_color", + self._rgbww_template, + None, + self._update_rgbww, none_on_template_error=True, ) if self._effect_list_template: @@ -337,7 +431,7 @@ class LightTemplate(TemplateEntity, LightEntity): ) super()._async_setup_templates() - async def async_turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: # noqa: C901 """Turn the light on.""" optimistic_set = False # set optimistic states @@ -357,19 +451,88 @@ class LightTemplate(TemplateEntity, LightEntity): "Optimistically setting color temperature to %s", kwargs[ATTR_COLOR_TEMP], ) + self._color_mode = ColorMode.COLOR_TEMP self._temperature = kwargs[ATTR_COLOR_TEMP] - if self._color_template is None: - self._color = None + if self._hs_template is None and self._color_template is None: + self._hs_color = None + if self._rgb_template is None: + self._rgb_color = None + if self._rgbw_template is None: + self._rgbw_color = None + if self._rgbww_template is None: + self._rgbww_color = None optimistic_set = True - if self._color_template is None and ATTR_HS_COLOR in kwargs: + if ( + self._hs_template is None + and self._color_template is None + and ATTR_HS_COLOR in kwargs + ): _LOGGER.debug( - "Optimistically 
setting color to %s", + "Optimistically setting hs color to %s", kwargs[ATTR_HS_COLOR], ) - self._color = kwargs[ATTR_HS_COLOR] + self._color_mode = ColorMode.HS + self._hs_color = kwargs[ATTR_HS_COLOR] if self._temperature_template is None: self._temperature = None + if self._rgb_template is None: + self._rgb_color = None + if self._rgbw_template is None: + self._rgbw_color = None + if self._rgbww_template is None: + self._rgbww_color = None + optimistic_set = True + + if self._rgb_template is None and ATTR_RGB_COLOR in kwargs: + _LOGGER.debug( + "Optimistically setting rgb color to %s", + kwargs[ATTR_RGB_COLOR], + ) + self._color_mode = ColorMode.RGB + self._rgb_color = kwargs[ATTR_RGB_COLOR] + if self._temperature_template is None: + self._temperature = None + if self._hs_template is None and self._color_template is None: + self._hs_color = None + if self._rgbw_template is None: + self._rgbw_color = None + if self._rgbww_template is None: + self._rgbww_color = None + optimistic_set = True + + if self._rgbw_template is None and ATTR_RGBW_COLOR in kwargs: + _LOGGER.debug( + "Optimistically setting rgbw color to %s", + kwargs[ATTR_RGBW_COLOR], + ) + self._color_mode = ColorMode.RGBW + self._rgbw_color = kwargs[ATTR_RGBW_COLOR] + if self._temperature_template is None: + self._temperature = None + if self._hs_template is None and self._color_template is None: + self._hs_color = None + if self._rgb_template is None: + self._rgb_color = None + if self._rgbww_template is None: + self._rgbww_color = None + optimistic_set = True + + if self._rgbww_template is None and ATTR_RGBWW_COLOR in kwargs: + _LOGGER.debug( + "Optimistically setting rgbww color to %s", + kwargs[ATTR_RGBWW_COLOR], + ) + self._color_mode = ColorMode.RGBWW + self._rgbww_color = kwargs[ATTR_RGBWW_COLOR] + if self._temperature_template is None: + self._temperature = None + if self._hs_template is None and self._color_template is None: + self._hs_color = None + if self._rgb_template is None: + 
self._rgb_color = None + if self._rgbw_template is None: + self._rgbw_color = None optimistic_set = True common_params = {} @@ -413,6 +576,58 @@ class LightTemplate(TemplateEntity, LightEntity): await self.async_run_script( self._color_script, run_variables=common_params, context=self._context ) + elif ATTR_HS_COLOR in kwargs and self._hs_script: + hs_value = kwargs[ATTR_HS_COLOR] + common_params["hs"] = hs_value + common_params["h"] = int(hs_value[0]) + common_params["s"] = int(hs_value[1]) + + await self.async_run_script( + self._hs_script, run_variables=common_params, context=self._context + ) + elif ATTR_RGBWW_COLOR in kwargs and self._rgbww_script: + rgbww_value = kwargs[ATTR_RGBWW_COLOR] + common_params["rgbww"] = rgbww_value + common_params["rgb"] = ( + int(rgbww_value[0]), + int(rgbww_value[1]), + int(rgbww_value[2]), + ) + common_params["r"] = int(rgbww_value[0]) + common_params["g"] = int(rgbww_value[1]) + common_params["b"] = int(rgbww_value[2]) + common_params["cw"] = int(rgbww_value[3]) + common_params["ww"] = int(rgbww_value[4]) + + await self.async_run_script( + self._rgbww_script, run_variables=common_params, context=self._context + ) + elif ATTR_RGBW_COLOR in kwargs and self._rgbw_script: + rgbw_value = kwargs[ATTR_RGBW_COLOR] + common_params["rgbw"] = rgbw_value + common_params["rgb"] = ( + int(rgbw_value[0]), + int(rgbw_value[1]), + int(rgbw_value[2]), + ) + common_params["r"] = int(rgbw_value[0]) + common_params["g"] = int(rgbw_value[1]) + common_params["b"] = int(rgbw_value[2]) + common_params["w"] = int(rgbw_value[3]) + + await self.async_run_script( + self._rgbw_script, run_variables=common_params, context=self._context + ) + elif ATTR_RGB_COLOR in kwargs and self._rgb_script: + rgb_value = kwargs[ATTR_RGB_COLOR] + common_params["rgb"] = rgb_value + common_params["r"] = int(rgb_value[0]) + common_params["g"] = int(rgb_value[1]) + common_params["b"] = int(rgb_value[2]) + + await self.async_run_script( + self._rgb_script, 
run_variables=common_params, context=self._context + ) elif ATTR_BRIGHTNESS in kwargs and self._level_script: await self.async_run_script( self._level_script, run_variables=common_params, context=self._context @@ -560,18 +775,19 @@ class LightTemplate(TemplateEntity, LightEntity): " this light, or 'None'" ) self._temperature = None + self._color_mode = ColorMode.COLOR_TEMP @callback - def _update_color(self, render): - """Update the hs_color from the template.""" + def _update_hs(self, render): + """Update the color from the template.""" if render is None: - self._color = None + self._hs_color = None return h_str = s_str = None if isinstance(render, str): if render in ("None", ""): - self._color = None + self._hs_color = None return h_str, s_str = map( float, render.replace("(", "").replace(")", "").split(",", 1) @@ -582,10 +798,12 @@ class LightTemplate(TemplateEntity, LightEntity): if ( h_str is not None and s_str is not None + and isinstance(h_str, (int, float)) + and isinstance(s_str, (int, float)) and 0 <= h_str <= 360 and 0 <= s_str <= 100 ): - self._color = (h_str, s_str) + self._hs_color = (h_str, s_str) elif h_str is not None and s_str is not None: _LOGGER.error( ( @@ -596,12 +814,151 @@ class LightTemplate(TemplateEntity, LightEntity): s_str, self.entity_id, ) - self._color = None + self._hs_color = None else: _LOGGER.error( "Received invalid hs_color : (%s) for entity %s", render, self.entity_id ) - self._color = None + self._hs_color = None + self._color_mode = ColorMode.HS + + @callback + def _update_rgb(self, render): + """Update the color from the template.""" + if render is None: + self._rgb_color = None + return + + r_int = g_int = b_int = None + if isinstance(render, str): + if render in ("None", ""): + self._rgb_color = None + return + cleanup_char = ["(", ")", "[", "]", " "] + for char in cleanup_char: + render = render.replace(char, "") + r_int, g_int, b_int = map(int, render.split(",", 3)) + elif isinstance(render, (list, tuple)) and 
len(render) == 3: + r_int, g_int, b_int = render + + if all( + value is not None and isinstance(value, (int, float)) and 0 <= value <= 255 + for value in (r_int, g_int, b_int) + ): + self._rgb_color = (r_int, g_int, b_int) + elif any( + isinstance(value, (int, float)) and not 0 <= value <= 255 + for value in (r_int, g_int, b_int) + ): + _LOGGER.error( + "Received invalid rgb_color : (%s, %s, %s) for entity %s. Expected: (0-255, 0-255, 0-255)", + r_int, + g_int, + b_int, + self.entity_id, + ) + self._rgb_color = None + else: + _LOGGER.error( + "Received invalid rgb_color : (%s) for entity %s", + render, + self.entity_id, + ) + self._rgb_color = None + self._color_mode = ColorMode.RGB + + @callback + def _update_rgbw(self, render): + """Update the color from the template.""" + if render is None: + self._rgbw_color = None + return + + r_int = g_int = b_int = w_int = None + if isinstance(render, str): + if render in ("None", ""): + self._rgbw_color = None + return + cleanup_char = ["(", ")", "[", "]", " "] + for char in cleanup_char: + render = render.replace(char, "") + r_int, g_int, b_int, w_int = map(int, render.split(",", 4)) + elif isinstance(render, (list, tuple)) and len(render) == 4: + r_int, g_int, b_int, w_int = render + + if all( + value is not None and isinstance(value, (int, float)) and 0 <= value <= 255 + for value in (r_int, g_int, b_int, w_int) + ): + self._rgbw_color = (r_int, g_int, b_int, w_int) + elif any( + isinstance(value, (int, float)) and not 0 <= value <= 255 + for value in (r_int, g_int, b_int, w_int) + ): + _LOGGER.error( + "Received invalid rgbw_color : (%s, %s, %s, %s) for entity %s.
Expected: (0-255, 0-255, 0-255, 0-255)", + r_int, + g_int, + b_int, + w_int, + self.entity_id, + ) + self._rgbw_color = None + else: + _LOGGER.error( + "Received invalid rgbw_color : (%s) for entity %s", + render, + self.entity_id, + ) + self._rgbw_color = None + self._color_mode = ColorMode.RGBW + + @callback + def _update_rgbww(self, render): + """Update the color from the template.""" + if render is None: + self._rgbww_color = None + return + + r_int = g_int = b_int = cw_int = ww_int = None + if isinstance(render, str): + if render in ("None", ""): + self._rgbww_color = None + return + cleanup_char = ["(", ")", "[", "]", " "] + for char in cleanup_char: + render = render.replace(char, "") + r_int, g_int, b_int, cw_int, ww_int = map(int, render.split(",", 5)) + elif isinstance(render, (list, tuple)) and len(render) == 5: + r_int, g_int, b_int, cw_int, ww_int = render + + if all( + value is not None and isinstance(value, (int, float)) and 0 <= value <= 255 + for value in (r_int, g_int, b_int, cw_int, ww_int) + ): + self._rgbww_color = (r_int, g_int, b_int, cw_int, ww_int) + elif any( + isinstance(value, (int, float)) and not 0 <= value <= 255 + for value in (r_int, g_int, b_int, cw_int, ww_int) + ): + _LOGGER.error( + "Received invalid rgbww_color : (%s, %s, %s, %s, %s) for entity %s.
Expected: (0-255, 0-255, 0-255, 0-255, 0-255)", + r_int, + g_int, + b_int, + cw_int, + ww_int, + self.entity_id, + ) + self._rgbww_color = None + else: + _LOGGER.error( + "Received invalid rgbww_color : (%s) for entity %s", + render, + self.entity_id, + ) + self._rgbww_color = None + self._color_mode = ColorMode.RGBWW @callback def _update_max_mireds(self, render): diff --git a/homeassistant/components/template/weather.py b/homeassistant/components/template/weather.py index 4e9149ebd07..0a00d1e79b4 100644 --- a/homeassistant/components/template/weather.py +++ b/homeassistant/components/template/weather.py @@ -57,7 +57,8 @@ from .template_entity import TemplateEntity, rewrite_common_legacy_to_modern_con from .trigger_entity import TriggerEntity CHECK_FORECAST_KEYS = ( - set().union(Forecast.__annotations__.keys()) + set() + .union(Forecast.__annotations__.keys()) # Manually add the forecast resulting attributes that only exists # as native_* in the Forecast definition .union(("apparent_temperature", "wind_gust_speed", "dew_point")) diff --git a/homeassistant/components/thread/dataset_store.py b/homeassistant/components/thread/dataset_store.py index f814fbffbd0..9c5d79cc0e0 100644 --- a/homeassistant/components/thread/dataset_store.py +++ b/homeassistant/components/thread/dataset_store.py @@ -38,7 +38,7 @@ class DatasetEntry: tlv: str created: datetime = dataclasses.field(default_factory=dt_util.utcnow) - id: str = dataclasses.field(default_factory=ulid_util.ulid) + id: str = dataclasses.field(default_factory=ulid_util.ulid_now) @property def channel(self) -> int | None: diff --git a/homeassistant/components/tibber/config_flow.py b/homeassistant/components/tibber/config_flow.py index fbd2345fb80..3fb426d6b11 100644 --- a/homeassistant/components/tibber/config_flow.py +++ b/homeassistant/components/tibber/config_flow.py @@ -19,6 +19,7 @@ DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str}) ERR_TIMEOUT = "timeout" ERR_CLIENT = "cannot_connect" ERR_TOKEN =
"invalid_access_token" +TOKEN_URL = "https://developer.tibber.com/settings/access-token" class TibberConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): @@ -60,6 +61,7 @@ class TibberConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, + description_placeholders={"url": TOKEN_URL}, errors=errors, ) @@ -75,5 +77,6 @@ class TibberConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, + description_placeholders={"url": TOKEN_URL}, errors={}, ) diff --git a/homeassistant/components/tibber/strings.json b/homeassistant/components/tibber/strings.json index 8306f25f587..c7cef9f4657 100644 --- a/homeassistant/components/tibber/strings.json +++ b/homeassistant/components/tibber/strings.json @@ -13,7 +13,7 @@ "data": { "access_token": "[%key:common::config_flow::data::access_token%]" }, - "description": "Enter your access token from https://developer.tibber.com/settings/accesstoken" + "description": "Enter your access token from {url}" } } } diff --git a/homeassistant/components/todo/__init__.py b/homeassistant/components/todo/__init__.py index 968256ce3d9..c0e0303d76e 100644 --- a/homeassistant/components/todo/__init__.py +++ b/homeassistant/components/todo/__init__.py @@ -1,9 +1,10 @@ """The todo integration.""" +from collections.abc import Callable, Iterable import dataclasses import datetime import logging -from typing import Any +from typing import Any, final import voluptuous as vol @@ -11,7 +12,13 @@ from homeassistant.components import frontend, websocket_api from homeassistant.components.websocket_api import ERR_NOT_FOUND, ERR_NOT_SUPPORTED from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ENTITY_ID -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import ( + CALLBACK_TYPE, + HomeAssistant, + ServiceCall, + SupportsResponse, + callback, +) from 
homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import ( # noqa: F401 @@ -21,8 +28,18 @@ from homeassistant.helpers.config_validation import ( # noqa: F401 from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType +from homeassistant.util import dt as dt_util +from homeassistant.util.json import JsonValueType -from .const import DOMAIN, TodoItemStatus, TodoListEntityFeature +from .const import ( + ATTR_DESCRIPTION, + ATTR_DUE, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + DOMAIN, + TodoItemStatus, + TodoListEntityFeature, +) _LOGGER = logging.getLogger(__name__) @@ -31,6 +48,63 @@ SCAN_INTERVAL = datetime.timedelta(seconds=60) ENTITY_ID_FORMAT = DOMAIN + ".{}" +@dataclasses.dataclass +class TodoItemFieldDescription: + """A description of To-do item fields and validation requirements.""" + + service_field: str + """Field name for service calls.""" + + todo_item_field: str + """Field name for TodoItem.""" + + validation: Callable[[Any], Any] + """Voluptuous validation function.""" + + required_feature: TodoListEntityFeature + """Entity feature that enables this field.""" + + +TODO_ITEM_FIELDS = [ + TodoItemFieldDescription( + service_field=ATTR_DUE_DATE, + validation=cv.date, + todo_item_field=ATTR_DUE, + required_feature=TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, + ), + TodoItemFieldDescription( + service_field=ATTR_DUE_DATETIME, + validation=vol.All(cv.datetime, dt_util.as_local), + todo_item_field=ATTR_DUE, + required_feature=TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, + ), + TodoItemFieldDescription( + service_field=ATTR_DESCRIPTION, + validation=cv.string, + todo_item_field=ATTR_DESCRIPTION, + required_feature=TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, + ), +] + +TODO_ITEM_FIELD_SCHEMA = { + vol.Optional(desc.service_field): desc.validation for desc in 
TODO_ITEM_FIELDS +} +TODO_ITEM_FIELD_VALIDATIONS = [cv.has_at_most_one_key(ATTR_DUE_DATE, ATTR_DUE_DATETIME)] + + +def _validate_supported_features( + supported_features: int | None, call_data: dict[str, Any] +) -> None: + """Validate service call fields against entity supported features.""" + for desc in TODO_ITEM_FIELDS: + if desc.service_field not in call_data: + continue + if not supported_features or not supported_features & desc.required_feature: + raise ValueError( + f"Entity does not support setting field '{desc.service_field}'" + ) + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Todo entities.""" component = hass.data[DOMAIN] = EntityComponent[TodoListEntity]( @@ -39,14 +113,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: frontend.async_register_built_in_panel(hass, "todo", "todo", "mdi:clipboard-list") + websocket_api.async_register_command(hass, websocket_handle_subscribe_todo_items) websocket_api.async_register_command(hass, websocket_handle_todo_item_list) websocket_api.async_register_command(hass, websocket_handle_todo_item_move) component.async_register_entity_service( "add_item", - { - vol.Required("item"): vol.All(cv.string, vol.Length(min=1)), - }, + vol.All( + cv.make_entity_service_schema( + { + vol.Required("item"): vol.All(cv.string, vol.Length(min=1)), + **TODO_ITEM_FIELD_SCHEMA, + } + ), + *TODO_ITEM_FIELD_VALIDATIONS, + ), _async_add_todo_item, required_features=[TodoListEntityFeature.CREATE_TODO_ITEM], ) @@ -58,11 +139,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: vol.Required("item"): vol.All(cv.string, vol.Length(min=1)), vol.Optional("rename"): vol.All(cv.string, vol.Length(min=1)), vol.Optional("status"): vol.In( - {TodoItemStatus.NEEDS_ACTION, TodoItemStatus.COMPLETED} + {TodoItemStatus.NEEDS_ACTION, TodoItemStatus.COMPLETED}, ), + **TODO_ITEM_FIELD_SCHEMA, } ), - cv.has_at_least_one_key("rename", "status"), + 
*TODO_ITEM_FIELD_VALIDATIONS, + cv.has_at_least_one_key( + "rename", "status", *[desc.service_field for desc in TODO_ITEM_FIELDS] + ), ), _async_update_todo_item, required_features=[TodoListEntityFeature.UPDATE_TODO_ITEM], @@ -77,6 +162,25 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: _async_remove_todo_items, required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], ) + component.async_register_entity_service( + "get_items", + cv.make_entity_service_schema( + { + vol.Optional("status"): vol.All( + cv.ensure_list, + [vol.In({TodoItemStatus.NEEDS_ACTION, TodoItemStatus.COMPLETED})], + ), + } + ), + _async_get_todo_items, + supports_response=SupportsResponse.ONLY, + ) + component.async_register_entity_service( + "remove_completed_items", + {}, + _async_remove_completed_items, + required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], + ) await component.async_setup(config) return True @@ -107,11 +211,26 @@ class TodoItem: status: TodoItemStatus | None = None """A status or confirmation of the To-do item.""" + due: datetime.date | datetime.datetime | None = None + """The date and time that a to-do is expected to be completed. + + This field may be a date or datetime depending whether the entity feature + DUE_DATE or DUE_DATETIME are set. + """ + + description: str | None = None + """A more complete description of than that provided by the summary. + + This field may be set when TodoListEntityFeature.DESCRIPTION is supported by + the entity. 
+ """ + class TodoListEntity(Entity): """An entity that represents a To-do list.""" _attr_todo_items: list[TodoItem] | None = None + _update_listeners: list[Callable[[list[JsonValueType] | None], None]] | None = None @property def state(self) -> int | None: @@ -149,6 +268,102 @@ class TodoListEntity(Entity): """ raise NotImplementedError() + @final + @callback + def async_subscribe_updates( + self, + listener: Callable[[list[JsonValueType] | None], None], + ) -> CALLBACK_TYPE: + """Subscribe to To-do list item updates. + + Called by websocket API. + """ + if self._update_listeners is None: + self._update_listeners = [] + self._update_listeners.append(listener) + + @callback + def unsubscribe() -> None: + if self._update_listeners: + self._update_listeners.remove(listener) + + return unsubscribe + + @final + @callback + def async_update_listeners(self) -> None: + """Push updated To-do items to all listeners.""" + if not self._update_listeners: + return + + todo_items: list[JsonValueType] = [ + dataclasses.asdict(item) for item in self.todo_items or () + ] + for listener in self._update_listeners: + listener(todo_items) + + @callback + def _async_write_ha_state(self) -> None: + """Notify to-do item subscribers.""" + super()._async_write_ha_state() + self.async_update_listeners() + + +@websocket_api.websocket_command( + { + vol.Required("type"): "todo/item/subscribe", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + } +) +@websocket_api.async_response +async def websocket_handle_subscribe_todo_items( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] +) -> None: + """Subscribe to To-do list item updates.""" + component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] + entity_id: str = msg["entity_id"] + + if not (entity := component.get_entity(entity_id)): + connection.send_error( + msg["id"], + "invalid_entity_id", + f"To-do list entity not found: {entity_id}", + ) + return + + @callback + def 
todo_item_listener(todo_items: list[JsonValueType] | None) -> None: + """Push updated To-do list items to websocket.""" + connection.send_message( + websocket_api.event_message( + msg["id"], + { + "items": todo_items, + }, + ) + ) + + connection.subscriptions[msg["id"]] = entity.async_subscribe_updates( + todo_item_listener + ) + connection.send_result(msg["id"]) + + # Push an initial forecast update + entity.async_update_listeners() + + +def _api_items_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]: + """Convert CalendarEvent dataclass items to dictionary of attributes.""" + result: dict[str, str] = {} + for name, value in obj: + if value is None: + continue + if isinstance(value, (datetime.date, datetime.datetime)): + result[name] = value.isoformat() + else: + result[name] = str(value) + return result + @websocket_api.websocket_command( { @@ -173,7 +388,13 @@ async def websocket_handle_todo_item_list( items: list[TodoItem] = entity.todo_items or [] connection.send_message( websocket_api.result_message( - msg["id"], {"items": [dataclasses.asdict(item) for item in items]} + msg["id"], + { + "items": [ + dataclasses.asdict(item, dict_factory=_api_items_factory) + for item in items + ] + }, ) ) @@ -230,8 +451,17 @@ def _find_by_uid_or_summary( async def _async_add_todo_item(entity: TodoListEntity, call: ServiceCall) -> None: """Add an item to the To-do list.""" + _validate_supported_features(entity.supported_features, call.data) await entity.async_create_todo_item( - item=TodoItem(summary=call.data["item"], status=TodoItemStatus.NEEDS_ACTION) + item=TodoItem( + summary=call.data["item"], + status=TodoItemStatus.NEEDS_ACTION, + **{ + desc.todo_item_field: call.data[desc.service_field] + for desc in TODO_ITEM_FIELDS + if desc.service_field in call.data + }, + ) ) @@ -242,11 +472,20 @@ async def _async_update_todo_item(entity: TodoListEntity, call: ServiceCall) -> if not found: raise ValueError(f"Unable to find To-do item '{item}'") - update_item = TodoItem( 
- uid=found.uid, summary=call.data.get("rename"), status=call.data.get("status") - ) + _validate_supported_features(entity.supported_features, call.data) - await entity.async_update_todo_item(item=update_item) + await entity.async_update_todo_item( + item=TodoItem( + uid=found.uid, + summary=call.data.get("rename"), + status=call.data.get("status"), + **{ + desc.todo_item_field: call.data[desc.service_field] + for desc in TODO_ITEM_FIELDS + if desc.service_field in call.data + }, + ) + ) async def _async_remove_todo_items(entity: TodoListEntity, call: ServiceCall) -> None: @@ -258,3 +497,27 @@ async def _async_remove_todo_items(entity: TodoListEntity, call: ServiceCall) -> raise ValueError(f"Unable to find To-do item '{item}") uids.append(found.uid) await entity.async_delete_todo_items(uids=uids) + + +async def _async_get_todo_items( + entity: TodoListEntity, call: ServiceCall +) -> dict[str, Any]: + """Return items in the To-do list.""" + return { + "items": [ + dataclasses.asdict(item, dict_factory=_api_items_factory) + for item in entity.todo_items or () + if not (statuses := call.data.get("status")) or item.status in statuses + ] + } + + +async def _async_remove_completed_items(entity: TodoListEntity, _: ServiceCall) -> None: + """Remove all completed items from the To-do list.""" + uids = [ + item.uid + for item in entity.todo_items or () + if item.status == TodoItemStatus.COMPLETED and item.uid + ] + if uids: + await entity.async_delete_todo_items(uids=uids) diff --git a/homeassistant/components/todo/const.py b/homeassistant/components/todo/const.py index 5a8a6e54e8f..a605f9fcba2 100644 --- a/homeassistant/components/todo/const.py +++ b/homeassistant/components/todo/const.py @@ -4,6 +4,11 @@ from enum import IntFlag, StrEnum DOMAIN = "todo" +ATTR_DUE = "due" +ATTR_DUE_DATE = "due_date" +ATTR_DUE_DATETIME = "due_datetime" +ATTR_DESCRIPTION = "description" + class TodoListEntityFeature(IntFlag): """Supported features of the To-do List entity.""" @@ -12,6 +17,9 
@@ class TodoListEntityFeature(IntFlag): DELETE_TODO_ITEM = 2 UPDATE_TODO_ITEM = 4 MOVE_TODO_ITEM = 8 + SET_DUE_DATE_ON_ITEM = 16 + SET_DUE_DATETIME_ON_ITEM = 32 + SET_DESCRIPTION_ON_ITEM = 64 class TodoItemStatus(StrEnum): diff --git a/homeassistant/components/todo/intent.py b/homeassistant/components/todo/intent.py new file mode 100644 index 00000000000..ba3545d8dfd --- /dev/null +++ b/homeassistant/components/todo/intent.py @@ -0,0 +1,54 @@ +"""Intents for the todo integration.""" +from __future__ import annotations + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent + +from . import DOMAIN, TodoItem, TodoListEntity + +INTENT_LIST_ADD_ITEM = "HassListAddItem" + + +async def async_setup_intents(hass: HomeAssistant) -> None: + """Set up the todo intents.""" + intent.async_register(hass, ListAddItemIntent()) + + +class ListAddItemIntent(intent.IntentHandler): + """Handle ListAddItem intents.""" + + intent_type = INTENT_LIST_ADD_ITEM + slot_schema = {"item": cv.string, "name": cv.string} + + async def async_handle(self, intent_obj: intent.Intent): + """Handle the intent.""" + hass = intent_obj.hass + + slots = self.async_validate_slots(intent_obj.slots) + item = slots["item"]["value"] + list_name = slots["name"]["value"] + + component: EntityComponent[TodoListEntity] = hass.data[DOMAIN] + target_list: TodoListEntity | None = None + + # Find matching list + for list_state in intent.async_match_states( + hass, name=list_name, domains=[DOMAIN] + ): + target_list = component.get_entity(list_state.entity_id) + if target_list is not None: + break + + if target_list is None: + raise intent.IntentHandleError(f"No to-do list: {list_name}") + + assert target_list is not None + + # Add to list + await target_list.async_create_todo_item(TodoItem(item)) + + response = intent_obj.create_response() + response.response_type 
= intent.IntentResponseType.ACTION_DONE + return response diff --git a/homeassistant/components/todo/services.yaml b/homeassistant/components/todo/services.yaml index 1bdb8aca779..bc7da7db941 100644 --- a/homeassistant/components/todo/services.yaml +++ b/homeassistant/components/todo/services.yaml @@ -1,3 +1,18 @@ +get_items: + target: + entity: + domain: todo + fields: + status: + example: "needs_action" + default: needs_action + selector: + select: + translation_key: status + options: + - needs_action + - completed + multiple: true add_item: target: entity: @@ -10,6 +25,18 @@ add_item: example: "Submit income tax return" selector: text: + due_date: + example: "2023-11-17" + selector: + date: + due_datetime: + example: "2023-11-17 13:30:00" + selector: + datetime: + description: + example: "A more complete description of the to-do item than that provided by the summary." + selector: + text: update_item: target: entity: @@ -34,6 +61,18 @@ update_item: options: - needs_action - completed + due_date: + example: "2023-11-17" + selector: + date: + due_datetime: + example: "2023-11-17 13:30:00" + selector: + datetime: + description: + example: "A more complete description of the to-do item than that provided by the summary." + selector: + text: remove_item: target: entity: @@ -45,3 +84,5 @@ remove_item: required: true selector: text: + +remove_completed_items: diff --git a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 6ba8aaba1a5..3da921a8f47 100644 --- a/homeassistant/components/todo/strings.json +++ b/homeassistant/components/todo/strings.json @@ -6,6 +6,16 @@ } }, "services": { + "get_items": { + "name": "Get to-do list items", + "description": "Get items on a to-do list.", + "fields": { + "status": { + "name": "Status", + "description": "Only return to-do items with the specified statuses. Returns not completed actions by default." 
+ } + } + }, "add_item": { "name": "Add to-do list item", "description": "Add a new to-do list item.", @@ -13,6 +23,18 @@ "item": { "name": "Item name", "description": "The name that represents the to-do item." + }, + "due_date": { + "name": "Due date", + "description": "The date the to-do item is expected to be completed." + }, + "due_datetime": { + "name": "Due date and time", + "description": "The date and time the to-do item is expected to be completed." + }, + "description": { + "name": "Description", + "description": "A more complete description of the to-do item than provided by the item name." } } }, @@ -31,9 +53,25 @@ "status": { "name": "Set status", "description": "A status or confirmation of the to-do item." + }, + "due_date": { + "name": "Due date", + "description": "The date the to-do item is expected to be completed." + }, + "due_datetime": { + "name": "Due date and time", + "description": "The date and time the to-do item is expected to be completed." + }, + "description": { + "name": "Description", + "description": "A more complete description of the to-do item than provided by the item name." } } }, + "remove_completed_items": { + "name": "Remove all completed to-do list items", + "description": "Remove all to-do list items that have been completed." 
+ }, "remove_item": { "name": "Remove a to-do list item", "description": "Remove an existing to-do list item by its name.", diff --git a/homeassistant/components/todoist/todo.py b/homeassistant/components/todoist/todo.py index c0d3ec6e2ce..64e83b8cc6e 100644 --- a/homeassistant/components/todoist/todo.py +++ b/homeassistant/components/todoist/todo.py @@ -1,7 +1,8 @@ """A todo platform for Todoist.""" import asyncio -from typing import cast +import datetime +from typing import Any, cast from homeassistant.components.todo import ( TodoItem, @@ -13,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import dt as dt_util from .const import DOMAIN from .coordinator import TodoistCoordinator @@ -30,6 +32,24 @@ async def async_setup_entry( ) +def _task_api_data(item: TodoItem) -> dict[str, Any]: + """Convert a TodoItem to the set of add or update arguments.""" + item_data: dict[str, Any] = {} + if summary := item.summary: + item_data["content"] = summary + if due := item.due: + if isinstance(due, datetime.datetime): + item_data["due"] = { + "date": due.date().isoformat(), + "datetime": due.isoformat(), + } + else: + item_data["due"] = {"date": due.isoformat()} + if description := item.description: + item_data["description"] = description + return item_data + + class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntity): """A Todoist TodoListEntity.""" @@ -37,6 +57,9 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit TodoListEntityFeature.CREATE_TODO_ITEM | TodoListEntityFeature.UPDATE_TODO_ITEM | TodoListEntityFeature.DELETE_TODO_ITEM + | TodoListEntityFeature.SET_DUE_DATE_ON_ITEM + | TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM + | TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM ) def 
__init__( @@ -66,11 +89,21 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit status = TodoItemStatus.COMPLETED else: status = TodoItemStatus.NEEDS_ACTION + due: datetime.date | datetime.datetime | None = None + if task_due := task.due: + if task_due.datetime: + due = dt_util.as_local( + datetime.datetime.fromisoformat(task_due.datetime) + ) + elif task_due.date: + due = datetime.date.fromisoformat(task_due.date) items.append( TodoItem( summary=task.content, uid=task.id, status=status, + due=due, + description=task.description or None, # Don't use empty string ) ) self._attr_todo_items = items @@ -81,7 +114,7 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit if item.status != TodoItemStatus.NEEDS_ACTION: raise ValueError("Only active tasks may be created.") await self.coordinator.api.add_task( - content=item.summary or "", + **_task_api_data(item), project_id=self._project_id, ) await self.coordinator.async_refresh() @@ -89,8 +122,8 @@ class TodoistTodoListEntity(CoordinatorEntity[TodoistCoordinator], TodoListEntit async def async_update_todo_item(self, item: TodoItem) -> None: """Update a To-do item.""" uid: str = cast(str, item.uid) - if item.summary: - await self.coordinator.api.update_task(task_id=uid, content=item.summary) + if update_data := _task_api_data(item): + await self.coordinator.api.update_task(task_id=uid, **update_data) if item.status is not None: if item.status == TodoItemStatus.COMPLETED: await self.coordinator.api.close_task(task_id=uid) diff --git a/homeassistant/components/tomato/device_tracker.py b/homeassistant/components/tomato/device_tracker.py index da64157dad8..d71dd45bcfe 100644 --- a/homeassistant/components/tomato/device_tracker.py +++ b/homeassistant/components/tomato/device_tracker.py @@ -100,10 +100,10 @@ class TomatoDeviceScanner(DeviceScanner): try: if self.ssl: response = requests.Session().send( - self.req, timeout=3, verify=self.verify_ssl + self.req, 
timeout=60, verify=self.verify_ssl ) else: - response = requests.Session().send(self.req, timeout=3) + response = requests.Session().send(self.req, timeout=60) # Calling and parsing the Tomato api here. We only need the # wldev and dhcpd_lease values. diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index e0ac41bdec6..162344f04ec 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -1,7 +1,7 @@ { "domain": "tplink", "name": "TP-Link Kasa Smart", - "codeowners": ["@rytilahti", "@thegardenmonkey"], + "codeowners": ["@rytilahti", "@thegardenmonkey", "@bdraco"], "config_flow": true, "dependencies": ["network"], "dhcp": [ diff --git a/homeassistant/components/tractive/__init__.py b/homeassistant/components/tractive/__init__.py index 300d7ebafc7..8dd0ed8e91b 100644 --- a/homeassistant/components/tractive/__init__.py +++ b/homeassistant/components/tractive/__init__.py @@ -24,11 +24,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( ATTR_ACTIVITY_LABEL, - ATTR_BUZZER, ATTR_CALORIES, ATTR_DAILY_GOAL, - ATTR_LED, - ATTR_LIVE_TRACKING, ATTR_MINUTES_ACTIVE, ATTR_MINUTES_DAY_SLEEP, ATTR_MINUTES_NIGHT_SLEEP, @@ -40,10 +37,12 @@ from .const import ( DOMAIN, RECONNECT_INTERVAL, SERVER_UNAVAILABLE, + SWITCH_KEY_MAP, TRACKABLES, TRACKER_ACTIVITY_STATUS_UPDATED, TRACKER_HARDWARE_STATUS_UPDATED, TRACKER_POSITION_UPDATED, + TRACKER_SWITCH_STATUS_UPDATED, TRACKER_WELLNESS_STATUS_UPDATED, ) @@ -225,13 +224,16 @@ class TractiveClient: ): self._last_hw_time = event["hardware"]["time"] self._send_hardware_update(event) - if ( "position" in event and self._last_pos_time != event["position"]["time"] ): self._last_pos_time = event["position"]["time"] self._send_position_update(event) + # If any key belonging to the switch is present in the event, + # we send a switch status update + if 
bool(set(SWITCH_KEY_MAP.values()).intersection(event)): + self._send_switch_update(event) except aiotractive.exceptions.UnauthorizedError: self._config_entry.async_start_reauth(self._hass) await self.unsubscribe() @@ -266,14 +268,21 @@ class TractiveClient: ATTR_BATTERY_LEVEL: event["hardware"]["battery_level"], ATTR_TRACKER_STATE: event["tracker_state"].lower(), ATTR_BATTERY_CHARGING: event["charging_state"] == "CHARGING", - ATTR_LIVE_TRACKING: event.get("live_tracking", {}).get("active"), - ATTR_BUZZER: event.get("buzzer_control", {}).get("active"), - ATTR_LED: event.get("led_control", {}).get("active"), } self._dispatch_tracker_event( TRACKER_HARDWARE_STATUS_UPDATED, event["tracker_id"], payload ) + def _send_switch_update(self, event: dict[str, Any]) -> None: + # Sometimes the event contains data for all switches, sometimes only for one. + payload = {} + for switch, key in SWITCH_KEY_MAP.items(): + if switch_data := event.get(key): + payload[switch] = switch_data["active"] + self._dispatch_tracker_event( + TRACKER_SWITCH_STATUS_UPDATED, event["tracker_id"], payload + ) + def _send_activity_update(self, event: dict[str, Any]) -> None: payload = { ATTR_MINUTES_ACTIVE: event["progress"]["achieved_minutes"], diff --git a/homeassistant/components/tractive/const.py b/homeassistant/components/tractive/const.py index 254a8c274f3..acb4f6f7487 100644 --- a/homeassistant/components/tractive/const.py +++ b/homeassistant/components/tractive/const.py @@ -26,9 +26,16 @@ CLIENT_ID = "625e5349c3c3b41c28a669f1" CLIENT = "client" TRACKABLES = "trackables" +TRACKER_ACTIVITY_STATUS_UPDATED = f"{DOMAIN}_tracker_activity_updated" TRACKER_HARDWARE_STATUS_UPDATED = f"{DOMAIN}_tracker_hardware_status_updated" TRACKER_POSITION_UPDATED = f"{DOMAIN}_tracker_position_updated" -TRACKER_ACTIVITY_STATUS_UPDATED = f"{DOMAIN}_tracker_activity_updated" +TRACKER_SWITCH_STATUS_UPDATED = f"{DOMAIN}_tracker_switch_updated" TRACKER_WELLNESS_STATUS_UPDATED = f"{DOMAIN}_tracker_wellness_updated" 
SERVER_UNAVAILABLE = f"{DOMAIN}_server_unavailable" + +SWITCH_KEY_MAP = { + ATTR_LIVE_TRACKING: "live_tracking", + ATTR_BUZZER: "buzzer_control", + ATTR_LED: "led_control", +} diff --git a/homeassistant/components/tractive/switch.py b/homeassistant/components/tractive/switch.py index 55acdb9bdcd..58c82bd6514 100644 --- a/homeassistant/components/tractive/switch.py +++ b/homeassistant/components/tractive/switch.py @@ -21,7 +21,7 @@ from .const import ( CLIENT, DOMAIN, TRACKABLES, - TRACKER_HARDWARE_STATUS_UPDATED, + TRACKER_SWITCH_STATUS_UPDATED, ) from .entity import TractiveEntity @@ -99,11 +99,10 @@ class TractiveSwitch(TractiveEntity, SwitchEntity): client, item.trackable, item.tracker_details, - f"{TRACKER_HARDWARE_STATUS_UPDATED}-{item.tracker_details['_id']}", + f"{TRACKER_SWITCH_STATUS_UPDATED}-{item.tracker_details['_id']}", ) self._attr_unique_id = f"{item.trackable['_id']}_{description.key}" - self._attr_available = False self._tracker = item.tracker self._method = getattr(self, description.method) self.entity_description = description @@ -111,9 +110,15 @@ class TractiveSwitch(TractiveEntity, SwitchEntity): @callback def handle_status_update(self, event: dict[str, Any]) -> None: """Handle status update.""" + if self.entity_description.key not in event: + return + + # We received an event, so the service is online and the switch entities should + # be available. 
+ self._attr_available = True self._attr_is_on = event[self.entity_description.key] - super().handle_status_update(event) + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn on a switch.""" diff --git a/homeassistant/components/tradfri/fan.py b/homeassistant/components/tradfri/fan.py index c41b24a2647..5c0f05004ba 100644 --- a/homeassistant/components/tradfri/fan.py +++ b/homeassistant/components/tradfri/fan.py @@ -119,8 +119,7 @@ class TradfriAirPurifierFan(TradfriBaseEntity, FanEntity): if not self._device_control: return - if not preset_mode == ATTR_AUTO: - raise ValueError("Preset must be 'Auto'.") + # Preset must be 'Auto' await self._api(self._device_control.turn_on_auto_mode()) diff --git a/homeassistant/components/trafikverket_camera/__init__.py b/homeassistant/components/trafikverket_camera/__init__.py index d9d28cfe13b..3ac3ce35882 100644 --- a/homeassistant/components/trafikverket_camera/__init__.py +++ b/homeassistant/components/trafikverket_camera/__init__.py @@ -6,7 +6,7 @@ import logging from pytrafikverket.trafikverket_camera import TrafikverketCamera from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -42,13 +42,12 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Migrate old entry.""" + api_key = entry.data[CONF_API_KEY] + web_session = async_get_clientsession(hass) + camera_api = TrafikverketCamera(web_session, api_key) # Change entry unique id from location to camera id if entry.version == 1: location = entry.data[CONF_LOCATION] - api_key = entry.data[CONF_API_KEY] - - web_session = async_get_clientsession(hass) - 
camera_api = TrafikverketCamera(web_session, api_key) try: camera_info = await camera_api.async_get_camera(location) @@ -60,14 +59,40 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if camera_id := camera_info.camera_id: entry.version = 2 - _LOGGER.debug( - "Migrate Trafikverket Camera config entry unique id to %s", - camera_id, - ) hass.config_entries.async_update_entry( entry, unique_id=f"{DOMAIN}-{camera_id}", ) + _LOGGER.debug( + "Migrated Trafikverket Camera config entry unique id to %s", + camera_id, + ) + else: + _LOGGER.error("Could not migrate the config entry. Camera has no id") + return False + + # Change entry data from location to id + if entry.version == 2: + location = entry.data[CONF_LOCATION] + + try: + camera_info = await camera_api.async_get_camera(location) + except Exception: # pylint: disable=broad-except + _LOGGER.error( + "Could not migrate the config entry. No connection to the api" + ) + return False + + if camera_id := camera_info.camera_id: + entry.version = 3 + _LOGGER.debug( + "Migrate Trafikverket Camera config entry unique id to %s", + camera_id, + ) + new_data = entry.data.copy() + new_data.pop(CONF_LOCATION) + new_data[CONF_ID] = camera_id + hass.config_entries.async_update_entry(entry, data=new_data) return True _LOGGER.error("Could not migrate the config entry. 
Camera has no id") return False diff --git a/homeassistant/components/trafikverket_camera/config_flow.py b/homeassistant/components/trafikverket_camera/config_flow.py index e75bc0bfa30..7572855b7d4 100644 --- a/homeassistant/components/trafikverket_camera/config_flow.py +++ b/homeassistant/components/trafikverket_camera/config_flow.py @@ -14,7 +14,7 @@ from pytrafikverket.trafikverket_camera import CameraInfo, TrafikverketCamera import voluptuous as vol from homeassistant import config_entries -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -25,7 +25,7 @@ from .const import CONF_LOCATION, DOMAIN class TVCameraConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Camera integration.""" - VERSION = 2 + VERSION = 3 entry: config_entries.ConfigEntry | None @@ -53,10 +53,7 @@ class TVCameraConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): if camera_info: camera_id = camera_info.camera_id - if _location := camera_info.location: - camera_location = _location - else: - camera_location = camera_info.camera_name + camera_location = camera_info.camera_name or "Trafikverket Camera" return (errors, camera_location, camera_id) @@ -76,9 +73,7 @@ class TVCameraConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): api_key = user_input[CONF_API_KEY] assert self.entry is not None - errors, _, _ = await self.validate_input( - api_key, self.entry.data[CONF_LOCATION] - ) + errors, _, _ = await self.validate_input(api_key, self.entry.data[CONF_ID]) if not errors: self.hass.config_entries.async_update_entry( @@ -121,10 +116,7 @@ class TVCameraConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return self.async_create_entry( title=camera_location, - data={ - CONF_API_KEY: 
api_key, - CONF_LOCATION: camera_location, - }, + data={CONF_API_KEY: api_key, CONF_ID: camera_id}, ) return self.async_show_form( diff --git a/homeassistant/components/trafikverket_camera/coordinator.py b/homeassistant/components/trafikverket_camera/coordinator.py index eb5a047ca73..8270fecd487 100644 --- a/homeassistant/components/trafikverket_camera/coordinator.py +++ b/homeassistant/components/trafikverket_camera/coordinator.py @@ -15,13 +15,13 @@ from pytrafikverket.exceptions import ( from pytrafikverket.trafikverket_camera import CameraInfo, TrafikverketCamera from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_LOCATION, DOMAIN +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) TIME_BETWEEN_UPDATES = timedelta(minutes=5) @@ -48,14 +48,14 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[CameraData]): ) self.session = async_get_clientsession(hass) self._camera_api = TrafikverketCamera(self.session, entry.data[CONF_API_KEY]) - self._location = entry.data[CONF_LOCATION] + self._id = entry.data[CONF_ID] async def _async_update_data(self) -> CameraData: """Fetch data from Trafikverket.""" camera_data: CameraInfo image: bytes | None = None try: - camera_data = await self._camera_api.async_get_camera(self._location) + camera_data = await self._camera_api.async_get_camera(self._id) except (NoCameraFound, MultipleCamerasFound, UnknownError) as error: raise UpdateFailed from error except InvalidAuthentication as error: diff --git a/homeassistant/components/trafikverket_camera/manifest.json b/homeassistant/components/trafikverket_camera/manifest.json index 
a679bd27d50..31eb911e24d 100644 --- a/homeassistant/components/trafikverket_camera/manifest.json +++ b/homeassistant/components/trafikverket_camera/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_camera", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==0.3.8"] + "requirements": ["pytrafikverket==0.3.9.1"] } diff --git a/homeassistant/components/trafikverket_ferry/manifest.json b/homeassistant/components/trafikverket_ferry/manifest.json index a62c05a9baf..7f750c26c57 100644 --- a/homeassistant/components/trafikverket_ferry/manifest.json +++ b/homeassistant/components/trafikverket_ferry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_ferry", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==0.3.8"] + "requirements": ["pytrafikverket==0.3.9.1"] } diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index b7808dc38b2..df05942add1 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -9,7 +9,6 @@ from typing import Any from pytrafikverket import TrafikverketTrain from pytrafikverket.exceptions import ( InvalidAuthentication, - MultipleTrainAnnouncementFound, MultipleTrainStationsFound, NoTrainAnnouncementFound, NoTrainStationFound, @@ -107,8 +106,6 @@ async def validate_input( errors["base"] = "more_stations" except NoTrainAnnouncementFound: errors["base"] = "no_trains" - except MultipleTrainAnnouncementFound: - errors["base"] = "multiple_trains" except UnknownError as error: _LOGGER.error("Unknown error occurred during validation %s", str(error)) errors["base"] = "cannot_connect" diff --git a/homeassistant/components/trafikverket_train/coordinator.py 
b/homeassistant/components/trafikverket_train/coordinator.py index ea852ab7fdf..91a7e9f07b2 100644 --- a/homeassistant/components/trafikverket_train/coordinator.py +++ b/homeassistant/components/trafikverket_train/coordinator.py @@ -8,7 +8,6 @@ import logging from pytrafikverket import TrafikverketTrain from pytrafikverket.exceptions import ( InvalidAuthentication, - MultipleTrainAnnouncementFound, NoTrainAnnouncementFound, UnknownError, ) @@ -112,7 +111,6 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): raise ConfigEntryAuthFailed from error except ( NoTrainAnnouncementFound, - MultipleTrainAnnouncementFound, UnknownError, ) as error: raise UpdateFailed( diff --git a/homeassistant/components/trafikverket_train/manifest.json b/homeassistant/components/trafikverket_train/manifest.json index 8c23cb02258..b68a56b3793 100644 --- a/homeassistant/components/trafikverket_train/manifest.json +++ b/homeassistant/components/trafikverket_train/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_train", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==0.3.8"] + "requirements": ["pytrafikverket==0.3.9.1"] } diff --git a/homeassistant/components/trafikverket_train/strings.json b/homeassistant/components/trafikverket_train/strings.json index 78d69c880ae..a2c286867b2 100644 --- a/homeassistant/components/trafikverket_train/strings.json +++ b/homeassistant/components/trafikverket_train/strings.json @@ -10,7 +10,6 @@ "invalid_station": "Could not find a station with the specified name", "more_stations": "Found multiple stations with the specified name", "no_trains": "No train found", - "multiple_trains": "Multiple trains found", "incorrect_api_key": "Invalid API key for selected account" }, "step": { diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py b/homeassistant/components/trafikverket_weatherstation/config_flow.py index 
f8f86298045..89cbd373665 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -1,6 +1,9 @@ """Adds config flow for Trafikverket Weather integration.""" from __future__ import annotations +from collections.abc import Mapping +from typing import Any + from pytrafikverket.exceptions import ( InvalidAuthentication, MultipleWeatherStationsFound, @@ -23,7 +26,7 @@ class TVWeatherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 - entry: config_entries.ConfigEntry + entry: config_entries.ConfigEntry | None = None async def validate_input(self, sensor_api: str, station: str) -> None: """Validate input from user input.""" @@ -71,3 +74,47 @@ class TVWeatherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult: + """Handle re-authentication with Trafikverket.""" + + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Confirm re-authentication with Trafikverket.""" + errors: dict[str, str] = {} + + if user_input: + api_key = user_input[CONF_API_KEY] + + assert self.entry is not None + + try: + await self.validate_input(api_key, self.entry.data[CONF_STATION]) + except InvalidAuthentication: + errors["base"] = "invalid_auth" + except NoWeatherStationFound: + errors["base"] = "invalid_station" + except MultipleWeatherStationsFound: + errors["base"] = "more_stations" + except Exception: # pylint: disable=broad-exception-caught + errors["base"] = "cannot_connect" + else: + self.hass.config_entries.async_update_entry( + self.entry, + data={ + **self.entry.data, + CONF_API_KEY: api_key, + }, + ) + await self.hass.config_entries.async_reload(self.entry.entry_id) + return 
self.async_abort(reason="reauth_successful") + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema({vol.Required(CONF_API_KEY): cv.string}), + errors=errors, + ) diff --git a/homeassistant/components/trafikverket_weatherstation/const.py b/homeassistant/components/trafikverket_weatherstation/const.py index 0d4680e9b37..34c18359ee4 100644 --- a/homeassistant/components/trafikverket_weatherstation/const.py +++ b/homeassistant/components/trafikverket_weatherstation/const.py @@ -5,13 +5,3 @@ DOMAIN = "trafikverket_weatherstation" CONF_STATION = "station" PLATFORMS = [Platform.SENSOR] ATTRIBUTION = "Data provided by Trafikverket" - -NONE_IS_ZERO_SENSORS = { - "air_temp", - "road_temp", - "wind_direction", - "wind_speed", - "wind_speed_max", - "humidity", - "precipitation_amount", -} diff --git a/homeassistant/components/trafikverket_weatherstation/manifest.json b/homeassistant/components/trafikverket_weatherstation/manifest.json index d13eda72835..bd4b2b99b6a 100644 --- a/homeassistant/components/trafikverket_weatherstation/manifest.json +++ b/homeassistant/components/trafikverket_weatherstation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_weatherstation", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==0.3.8"] + "requirements": ["pytrafikverket==0.3.9.1"] } diff --git a/homeassistant/components/trafikverket_weatherstation/sensor.py b/homeassistant/components/trafikverket_weatherstation/sensor.py index 3ec7d137b6e..607a230fbbe 100644 --- a/homeassistant/components/trafikverket_weatherstation/sensor.py +++ b/homeassistant/components/trafikverket_weatherstation/sensor.py @@ -1,9 +1,11 @@ """Weather information for air and road temperature (by Trafikverket).""" from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from datetime import datetime -from typing import TYPE_CHECKING + +from 
pytrafikverket.trafikverket_weather import WeatherStationInfo from homeassistant.components.sensor import ( SensorDeviceClass, @@ -15,6 +17,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( DEGREE, PERCENTAGE, + UnitOfLength, UnitOfSpeed, UnitOfTemperature, UnitOfVolumetricFlux, @@ -24,48 +27,18 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from homeassistant.util.dt import as_utc +from homeassistant.util import dt as dt_util -from .const import ATTRIBUTION, CONF_STATION, DOMAIN, NONE_IS_ZERO_SENSORS +from .const import ATTRIBUTION, CONF_STATION, DOMAIN from .coordinator import TVDataUpdateCoordinator -WIND_DIRECTIONS = [ - "east", - "north_east", - "east_south_east", - "north", - "north_north_east", - "north_north_west", - "north_west", - "south", - "south_east", - "south_south_west", - "south_west", - "west", -] -PRECIPITATION_AMOUNTNAME = [ - "error", - "mild_rain", - "moderate_rain", - "heavy_rain", - "mild_snow_rain", - "moderate_snow_rain", - "heavy_snow_rain", - "mild_snow", - "moderate_snow", - "heavy_snow", - "other", - "none", - "error", -] PRECIPITATION_TYPE = [ - "drizzle", - "hail", - "none", + "no", "rain", - "snow", - "rain_snow_mixed", "freezing_rain", + "snow", + "sleet", + "yes", ] @@ -73,7 +46,7 @@ PRECIPITATION_TYPE = [ class TrafikverketRequiredKeysMixin: """Mixin for required keys.""" - api_key: str + value_fn: Callable[[WeatherStationInfo], StateType | datetime] @dataclass @@ -83,11 +56,18 @@ class TrafikverketSensorEntityDescription( """Describes Trafikverket sensor entity.""" +def add_utc_timezone(date_time: datetime | None) -> datetime | None: + """Add UTC timezone if datetime.""" + if date_time: + return date_time.replace(tzinfo=dt_util.UTC) + return None + + SENSOR_TYPES: 
tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="air_temp", translation_key="air_temperature", - api_key="air_temp", + value_fn=lambda data: data.air_temp or 0, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, @@ -95,7 +75,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="road_temp", translation_key="road_temperature", - api_key="road_temp", + value_fn=lambda data: data.road_temp or 0, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, @@ -103,8 +83,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="precipitation", translation_key="precipitation", - api_key="precipitationtype_translated", - name="Precipitation type", + value_fn=lambda data: data.precipitationtype, icon="mdi:weather-snowy-rainy", entity_registry_enabled_default=False, options=PRECIPITATION_TYPE, @@ -113,24 +92,14 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] 
= ( TrafikverketSensorEntityDescription( key="wind_direction", translation_key="wind_direction", - api_key="winddirection", - name="Wind direction", + value_fn=lambda data: data.winddirection, native_unit_of_measurement=DEGREE, icon="mdi:flag-triangle", state_class=SensorStateClass.MEASUREMENT, ), - TrafikverketSensorEntityDescription( - key="wind_direction_text", - translation_key="wind_direction_text", - api_key="winddirectiontext_translated", - name="Wind direction text", - icon="mdi:flag-triangle", - options=WIND_DIRECTIONS, - device_class=SensorDeviceClass.ENUM, - ), TrafikverketSensorEntityDescription( key="wind_speed", - api_key="windforce", + value_fn=lambda data: data.windforce or 0, native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, device_class=SensorDeviceClass.WIND_SPEED, state_class=SensorStateClass.MEASUREMENT, @@ -138,7 +107,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="wind_speed_max", translation_key="wind_speed_max", - api_key="windforcemax", + value_fn=lambda data: data.windforcemax or 0, native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, device_class=SensorDeviceClass.WIND_SPEED, icon="mdi:weather-windy-variant", @@ -147,7 +116,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( ), TrafikverketSensorEntityDescription( key="humidity", - api_key="humidity", + value_fn=lambda data: data.humidity or 0, native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, entity_registry_enabled_default=False, @@ -155,24 +124,85 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] 
= ( ), TrafikverketSensorEntityDescription( key="precipitation_amount", - api_key="precipitation_amount", + value_fn=lambda data: data.precipitation_amount or 0, native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, state_class=SensorStateClass.MEASUREMENT, ), - TrafikverketSensorEntityDescription( - key="precipitation_amountname", - translation_key="precipitation_amountname", - api_key="precipitation_amountname_translated", - icon="mdi:weather-pouring", - entity_registry_enabled_default=False, - options=PRECIPITATION_AMOUNTNAME, - device_class=SensorDeviceClass.ENUM, - ), TrafikverketSensorEntityDescription( key="measure_time", translation_key="measure_time", - api_key="measure_time", + value_fn=lambda data: data.measure_time, + icon="mdi:clock", + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + TrafikverketSensorEntityDescription( + key="dew_point", + translation_key="dew_point", + value_fn=lambda data: data.dew_point or 0, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TrafikverketSensorEntityDescription( + key="visible_distance", + translation_key="visible_distance", + value_fn=lambda data: data.visible_distance, + native_unit_of_measurement=UnitOfLength.METERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="road_ice_depth", + translation_key="road_ice_depth", + value_fn=lambda data: data.road_ice_depth, + native_unit_of_measurement=UnitOfLength.MILLIMETERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="road_snow_depth", + translation_key="road_snow_depth", + value_fn=lambda data: 
data.road_snow_depth, + native_unit_of_measurement=UnitOfLength.MILLIMETERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="road_water_depth", + translation_key="road_water_depth", + value_fn=lambda data: data.road_water_depth, + native_unit_of_measurement=UnitOfLength.MILLIMETERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="road_water_equivalent_depth", + translation_key="road_water_equivalent_depth", + value_fn=lambda data: data.road_water_equivalent_depth, + native_unit_of_measurement=UnitOfLength.MILLIMETERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="wind_height", + translation_key="wind_height", + value_fn=lambda data: data.wind_height, + native_unit_of_measurement=UnitOfLength.METERS, + device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TrafikverketSensorEntityDescription( + key="modified_time", + translation_key="modified_time", + value_fn=lambda data: add_utc_timezone(data.modified_time), icon="mdi:clock", entity_registry_enabled_default=False, device_class=SensorDeviceClass.TIMESTAMP, @@ -195,12 +225,6 @@ async def async_setup_entry( ) -def _to_datetime(measuretime: str) -> datetime: - """Return isoformatted utc time.""" - time_obj = datetime.strptime(measuretime, "%Y-%m-%dT%H:%M:%S.%f%z") - return as_utc(time_obj) - - class TrafikverketWeatherStation( CoordinatorEntity[TVDataUpdateCoordinator], SensorEntity ): @@ -233,23 +257,4 @@ class TrafikverketWeatherStation( @property def native_value(self) -> StateType | datetime: """Return state of sensor.""" - if 
self.entity_description.api_key == "measure_time": - if TYPE_CHECKING: - assert self.coordinator.data.measure_time - return self.coordinator.data.measure_time - - state: StateType = getattr( - self.coordinator.data, self.entity_description.api_key - ) - - # For zero value state the api reports back None for certain sensors. - if state is None and self.entity_description.key in NONE_IS_ZERO_SENSORS: - return 0 - return state - - @property - def available(self) -> bool: - """Return if entity is available.""" - if TYPE_CHECKING: - assert self.coordinator.data.active - return self.coordinator.data.active and super().available + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/trafikverket_weatherstation/strings.json b/homeassistant/components/trafikverket_weatherstation/strings.json index 9ff1b077f33..a4838dab0e2 100644 --- a/homeassistant/components/trafikverket_weatherstation/strings.json +++ b/homeassistant/components/trafikverket_weatherstation/strings.json @@ -1,7 +1,8 @@ { "config": { "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -15,6 +16,11 @@ "api_key": "[%key:common::config_flow::data::api_key%]", "station": "Station" } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } } } }, @@ -29,58 +35,46 @@ "precipitation": { "name": "Precipitation type", "state": { - "drizzle": "Drizzle", - "hail": "Hail", - "none": "None", + "no": "None", "rain": "Rain", + "freezing_rain": "Freezing rain", "snow": "Snow", - "rain_snow_mixed": "Rain and snow mixed", - "freezing_rain": "Freezing rain" + "sleet": "Sleet", + "yes": "Yes (unknown)" } }, "wind_direction": { "name": 
"Wind direction" }, - "wind_direction_text": { - "name": "Wind direction text", - "state": { - "east": "East", - "north_east": "North east", - "east_south_east": "East-south east", - "north": "North", - "north_north_east": "North-north east", - "north_north_west": "North-north west", - "north_west": "North west", - "south": "South", - "south_east": "South east", - "south_south_west": "South-south west", - "south_west": "South west", - "west": "West" - } - }, "wind_speed_max": { "name": "Wind speed max" }, - "precipitation_amountname": { - "name": "Precipitation name", - "state": { - "error": "Error", - "mild_rain": "Mild rain", - "moderate_rain": "Moderate rain", - "heavy_rain": "Heavy rain", - "mild_snow_rain": "Mild rain and snow mixed", - "moderate_snow_rain": "Moderate rain and snow mixed", - "heavy_snow_rain": "Heavy rain and snow mixed", - "mild_snow": "Mild snow", - "moderate_snow": "Moderate snow", - "heavy_snow": "Heavy snow", - "other": "Other", - "none": "None", - "unknown": "Unknown" - } - }, "measure_time": { "name": "Measure time" + }, + "dew_point": { + "name": "Dew point" + }, + "visible_distance": { + "name": "Visible distance" + }, + "road_ice_depth": { + "name": "Ice depth on road" + }, + "road_snow_depth": { + "name": "Snow depth on road" + }, + "road_water_depth": { + "name": "Water depth on road" + }, + "road_water_equivalent_depth": { + "name": "Water equivalent depth on road" + }, + "wind_height": { + "name": "Wind measurement height" + }, + "modified_time": { + "name": "Data modified time" } } } diff --git a/homeassistant/components/trend/binary_sensor.py b/homeassistant/components/trend/binary_sensor.py index 2d00f35202c..fa6ad8e5382 100644 --- a/homeassistant/components/trend/binary_sensor.py +++ b/homeassistant/components/trend/binary_sensor.py @@ -52,23 +52,39 @@ from .const import ( CONF_INVERT, CONF_MAX_SAMPLES, CONF_MIN_GRADIENT, + CONF_MIN_SAMPLES, CONF_SAMPLE_DURATION, DOMAIN, ) _LOGGER = logging.getLogger(__name__) -SENSOR_SCHEMA 
= vol.Schema( - { - vol.Required(CONF_ENTITY_ID): cv.entity_id, - vol.Optional(CONF_ATTRIBUTE): cv.string, - vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, - vol.Optional(CONF_FRIENDLY_NAME): cv.string, - vol.Optional(CONF_INVERT, default=False): cv.boolean, - vol.Optional(CONF_MAX_SAMPLES, default=2): cv.positive_int, - vol.Optional(CONF_MIN_GRADIENT, default=0.0): vol.Coerce(float), - vol.Optional(CONF_SAMPLE_DURATION, default=0): cv.positive_int, - } + +def _validate_min_max(data: dict[str, Any]) -> dict[str, Any]: + if ( + CONF_MIN_SAMPLES in data + and CONF_MAX_SAMPLES in data + and data[CONF_MAX_SAMPLES] < data[CONF_MIN_SAMPLES] + ): + raise vol.Invalid("min_samples must be smaller than or equal to max_samples") + return data + + +SENSOR_SCHEMA = vol.All( + vol.Schema( + { + vol.Required(CONF_ENTITY_ID): cv.entity_id, + vol.Optional(CONF_ATTRIBUTE): cv.string, + vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, + vol.Optional(CONF_FRIENDLY_NAME): cv.string, + vol.Optional(CONF_INVERT, default=False): cv.boolean, + vol.Optional(CONF_MAX_SAMPLES, default=2): cv.positive_int, + vol.Optional(CONF_MIN_GRADIENT, default=0.0): vol.Coerce(float), + vol.Optional(CONF_SAMPLE_DURATION, default=0): cv.positive_int, + vol.Optional(CONF_MIN_SAMPLES, default=2): cv.positive_int, + } + ), + _validate_min_max, ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( @@ -96,6 +112,7 @@ async def async_setup_platform( max_samples = device_config[CONF_MAX_SAMPLES] min_gradient = device_config[CONF_MIN_GRADIENT] sample_duration = device_config[CONF_SAMPLE_DURATION] + min_samples = device_config[CONF_MIN_SAMPLES] sensors.append( SensorTrend( @@ -109,8 +126,10 @@ async def async_setup_platform( max_samples, min_gradient, sample_duration, + min_samples, ) ) + if not sensors: _LOGGER.error("No sensors added") return @@ -137,6 +156,7 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): max_samples: int, min_gradient: float, sample_duration: int, + min_samples: int, ) -> None: 
"""Initialize the sensor.""" self._hass = hass @@ -148,6 +168,7 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): self._invert = invert self._sample_duration = sample_duration self._min_gradient = min_gradient + self._min_samples = min_samples self.samples: deque = deque(maxlen=max_samples) @property @@ -210,7 +231,7 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): while self.samples and self.samples[0][0] < cutoff: self.samples.popleft() - if len(self.samples) < 2: + if len(self.samples) < self._min_samples: return # Calculate gradient of linear trend diff --git a/homeassistant/components/trend/const.py b/homeassistant/components/trend/const.py index 6787dc08445..3d82bfcc648 100644 --- a/homeassistant/components/trend/const.py +++ b/homeassistant/components/trend/const.py @@ -12,3 +12,4 @@ CONF_INVERT = "invert" CONF_MAX_SAMPLES = "max_samples" CONF_MIN_GRADIENT = "min_gradient" CONF_SAMPLE_DURATION = "sample_duration" +CONF_MIN_SAMPLES = "min_samples" diff --git a/homeassistant/components/tts/manifest.json b/homeassistant/components/tts/manifest.json index 338a8c35003..f379dc01dee 100644 --- a/homeassistant/components/tts/manifest.json +++ b/homeassistant/components/tts/manifest.json @@ -2,7 +2,7 @@ "domain": "tts", "name": "Text-to-speech (TTS)", "after_dependencies": ["media_player"], - "codeowners": ["@home-assistant/core", "@pvizeli"], + "codeowners": ["@home-assistant/core"], "dependencies": ["http", "ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/tts", "integration_type": "entity", diff --git a/homeassistant/components/tuya/const.py b/homeassistant/components/tuya/const.py index acf9f8bbd2c..19faa76a191 100644 --- a/homeassistant/components/tuya/const.py +++ b/homeassistant/components/tuya/const.py @@ -338,6 +338,7 @@ class DPCode(StrEnum): TEMP_VALUE_V2 = "temp_value_v2" TEMPER_ALARM = "temper_alarm" # Tamper alarm TIME_TOTAL = "time_total" + TIME_USE = "time_use" # Total seconds of irrigation TOTAL_CLEAN_AREA = 
"total_clean_area" TOTAL_CLEAN_COUNT = "total_clean_count" TOTAL_CLEAN_TIME = "total_clean_time" @@ -362,6 +363,7 @@ class DPCode(StrEnum): WATER_RESET = "water_reset" # Resetting of water usage days WATER_SET = "water_set" # Water level WATERSENSOR_STATE = "watersensor_state" + WEATHER_DELAY = "weather_delay" WET = "wet" # Humidification WINDOW_CHECK = "window_check" WINDOW_STATE = "window_state" diff --git a/homeassistant/components/tuya/select.py b/homeassistant/components/tuya/select.py index 3cc8c72f555..bc44ddf479c 100644 --- a/homeassistant/components/tuya/select.py +++ b/homeassistant/components/tuya/select.py @@ -75,6 +75,16 @@ SELECTS: dict[str, tuple[SelectEntityDescription, ...]] = { icon="mdi:thermometer-lines", ), ), + # Smart Water Timer + "sfkzq": ( + # Irrigation will not be run within this set delay period + SelectEntityDescription( + key=DPCode.WEATHER_DELAY, + translation_key="weather_delay", + icon="mdi:weather-cloudy-clock", + entity_category=EntityCategory.CONFIG, + ), + ), # Siren Alarm # https://developer.tuya.com/en/docs/iot/categorysgbj?id=Kaiuz37tlpbnu "sgbj": ( diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index 9f055a6262e..4bf8808f5f1 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -517,6 +517,18 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { ), *BATTERY_SENSORS, ), + # Smart Water Timer + "sfkzq": ( + # Total seconds of irrigation. 
Read-write value; the device appears to ignore the write action (maybe firmware bug) + TuyaSensorEntityDescription( + key=DPCode.TIME_USE, + translation_key="total_watering_time", + icon="mdi:history", + state_class=SensorStateClass.TOTAL_INCREASING, + entity_category=EntityCategory.DIAGNOSTIC, + ), + *BATTERY_SENSORS, + ), # Water Detector # https://developer.tuya.com/en/docs/iot/categorysj?id=Kaiuz3iub2sli "sj": BATTERY_SENSORS, @@ -818,6 +830,27 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { native_unit_of_measurement=UnitOfElectricPotential.VOLT, subkey="voltage", ), + TuyaSensorEntityDescription( + key=DPCode.CUR_CURRENT, + translation_key="current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_POWER, + translation_key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_VOLTAGE, + translation_key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), ), # Robot Vacuum # https://developer.tuya.com/en/docs/iot/fsd?id=K9gf487ck1tlo diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index 9c807419551..e9b13e10a95 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -421,6 +421,19 @@ "4": "Mood 4", "5": "Mood 5" } + }, + "weather_delay": { + "name": "Weather delay", + "state": { + "cancel": "Cancel", + "24h": "24h", + "48h": "48h", + "72h": "72h", + "96h": "96h", + "120h": "120h", + "144h": "144h", + "168h": "168h" + } } }, "sensor": { @@ -556,6 +569,9 @@ "water_level": { "name": "Water level" }, + "total_watering_time": { + "name": "Total watering time" + }, "filter_utilization": { "name": 
"Filter utilization" }, diff --git a/homeassistant/components/tuya/switch.py b/homeassistant/components/tuya/switch.py index a48d797555c..ba304b4069e 100644 --- a/homeassistant/components/tuya/switch.py +++ b/homeassistant/components/tuya/switch.py @@ -430,6 +430,14 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = { entity_category=EntityCategory.CONFIG, ), ), + # Smart Water Timer + "sfkzq": ( + SwitchEntityDescription( + key=DPCode.SWITCH, + translation_key="switch", + icon="mdi:sprinkler-variant", + ), + ), # Siren Alarm # https://developer.tuya.com/en/docs/iot/categorysgbj?id=Kaiuz37tlpbnu "sgbj": ( diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index 6cb98444be6..aef70aa6a10 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["twentemilieu"], "quality_scale": "platinum", - "requirements": ["twentemilieu==2.0.0"] + "requirements": ["twentemilieu==2.0.1"] } diff --git a/homeassistant/components/twentemilieu/sensor.py b/homeassistant/components/twentemilieu/sensor.py index fba10a269f7..1278f6523a5 100644 --- a/homeassistant/components/twentemilieu/sensor.py +++ b/homeassistant/components/twentemilieu/sensor.py @@ -21,20 +21,13 @@ from .const import DOMAIN from .entity import TwenteMilieuEntity -@dataclass -class TwenteMilieuSensorDescriptionMixin: - """Define an entity description mixin.""" +@dataclass(kw_only=True) +class TwenteMilieuSensorDescription(SensorEntityDescription): + """Describe an Twente Milieu sensor.""" waste_type: WasteType -@dataclass -class TwenteMilieuSensorDescription( - SensorEntityDescription, TwenteMilieuSensorDescriptionMixin -): - """Describe an Ambient PWS binary sensor.""" - - SENSORS: tuple[TwenteMilieuSensorDescription, ...] 
= ( TwenteMilieuSensorDescription( key="tree", diff --git a/homeassistant/components/unifi/button.py b/homeassistant/components/unifi/button.py index 7471675123a..af7ab5852ab 100644 --- a/homeassistant/components/unifi/button.py +++ b/homeassistant/components/unifi/button.py @@ -11,8 +11,14 @@ from typing import Any, Generic import aiounifi from aiounifi.interfaces.api_handlers import ItemEvent from aiounifi.interfaces.devices import Devices +from aiounifi.interfaces.ports import Ports from aiounifi.models.api import ApiItemT -from aiounifi.models.device import Device, DeviceRestartRequest +from aiounifi.models.device import ( + Device, + DevicePowerCyclePortRequest, + DeviceRestartRequest, +) +from aiounifi.models.port import Port from homeassistant.components.button import ( ButtonDeviceClass, @@ -42,6 +48,15 @@ async def async_restart_device_control_fn( await api.request(DeviceRestartRequest.create(obj_id)) +@callback +async def async_power_cycle_port_control_fn( + api: aiounifi.Controller, obj_id: str +) -> None: + """Power cycle a device's PoE port.""" + mac, _, index = obj_id.partition("_") + await api.request(DevicePowerCyclePortRequest.create(mac, int(index))) + + @dataclass class UnifiButtonEntityDescriptionMixin(Generic[HandlerT, ApiItemT]): """Validate and load entities from different UniFi handlers.""" @@ -77,6 +92,24 @@ ENTITY_DESCRIPTIONS: tuple[UnifiButtonEntityDescription, ...]
= ( supported_fn=lambda controller, obj_id: True, unique_id_fn=lambda controller, obj_id: f"device_restart-{obj_id}", ), + UnifiButtonEntityDescription[Ports, Port]( + key="PoE power cycle", + entity_category=EntityCategory.CONFIG, + has_entity_name=True, + device_class=ButtonDeviceClass.RESTART, + allowed_fn=lambda controller, obj_id: True, + api_handler_fn=lambda api: api.ports, + available_fn=async_device_available_fn, + control_fn=async_power_cycle_port_control_fn, + device_info_fn=async_device_device_info_fn, + event_is_on=None, + event_to_subscribe=None, + name_fn=lambda port: f"{port.name} Power Cycle", + object_fn=lambda api, obj_id: api.ports[obj_id], + should_poll=False, + supported_fn=lambda controller, obj_id: controller.api.ports[obj_id].port_poe, + unique_id_fn=lambda controller, obj_id: f"power_cycle-{obj_id}", + ), ) diff --git a/homeassistant/components/unifi/controller.py b/homeassistant/components/unifi/controller.py index b89e64f285f..6bd8b9db426 100644 --- a/homeassistant/components/unifi/controller.py +++ b/homeassistant/components/unifi/controller.py @@ -506,6 +506,14 @@ async def get_unifi_controller( ) raise CannotConnect from err + except aiounifi.Forbidden as err: + LOGGER.warning( + "Access forbidden to UniFi Network at %s, check access rights: %s", + config[CONF_HOST], + err, + ) + raise AuthenticationRequired from err + except aiounifi.LoginRequired as err: LOGGER.warning( "Connected to UniFi Network at %s but login required: %s", diff --git a/homeassistant/components/unifi/device_tracker.py b/homeassistant/components/unifi/device_tracker.py index 5c9694c669c..1be52b97974 100644 --- a/homeassistant/components/unifi/device_tracker.py +++ b/homeassistant/components/unifi/device_tracker.py @@ -17,14 +17,15 @@ from aiounifi.models.client import Client from aiounifi.models.device import Device from aiounifi.models.event import Event, EventKey -from homeassistant.components.device_tracker import ScannerEntity, SourceType +from 
homeassistant.components.device_tracker import DOMAIN, ScannerEntity, SourceType from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event as core_Event, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as er import homeassistant.util.dt as dt_util -from .controller import UniFiController +from .controller import UNIFI_DOMAIN, UniFiController from .entity import ( HandlerT, UnifiEntity, @@ -175,7 +176,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiTrackerEntityDescription, ...] = ( object_fn=lambda api, obj_id: api.clients[obj_id], should_poll=False, supported_fn=lambda controller, obj_id: True, - unique_id_fn=lambda controller, obj_id: f"{obj_id}-{controller.site}", + unique_id_fn=lambda controller, obj_id: f"{controller.site}-{obj_id}", ip_address_fn=lambda api, obj_id: api.clients[obj_id].ip, hostname_fn=lambda api, obj_id: api.clients[obj_id].hostname, ), @@ -201,12 +202,37 @@ ENTITY_DESCRIPTIONS: tuple[UnifiTrackerEntityDescription, ...] = ( ) +@callback +def async_update_unique_id(hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Normalize client unique ID to have a prefix rather than suffix. + + Introduced with release 2023.12. 
+ """ + controller: UniFiController = hass.data[UNIFI_DOMAIN][config_entry.entry_id] + ent_reg = er.async_get(hass) + + @callback + def update_unique_id(obj_id: str) -> None: + """Rework unique ID.""" + new_unique_id = f"{controller.site}-{obj_id}" + if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id): + return + + unique_id = f"{obj_id}-{controller.site}" + if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id): + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + + for obj_id in list(controller.api.clients) + list(controller.api.clients_all): + update_unique_id(obj_id) + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up device tracker for UniFi Network integration.""" + async_update_unique_id(hass, config_entry) UniFiController.register_platform( hass, config_entry, async_add_entities, UnifiScannerEntity, ENTITY_DESCRIPTIONS ) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index ed8649896dd..52ed8ec3101 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==65"], + "requirements": ["aiounifi==66"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifi/switch.py b/homeassistant/components/unifi/switch.py index 41c1f55a22a..1e9ec8b14c8 100644 --- a/homeassistant/components/unifi/switch.py +++ b/homeassistant/components/unifi/switch.py @@ -42,9 +42,10 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as 
er from .const import ATTR_MANUFACTURER -from .controller import UniFiController +from .controller import UNIFI_DOMAIN, UniFiController from .entity import ( HandlerT, SubscriptionT, @@ -256,7 +257,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( object_fn=lambda api, obj_id: api.outlets[obj_id], should_poll=False, supported_fn=async_outlet_supports_switching_fn, - unique_id_fn=lambda controller, obj_id: f"{obj_id.split('_', 1)[0]}-outlet-{obj_id.split('_', 1)[1]}", + unique_id_fn=lambda controller, obj_id: f"outlet-{obj_id}", ), UnifiSwitchEntityDescription[PortForwarding, PortForward]( key="Port forward control", @@ -297,7 +298,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( object_fn=lambda api, obj_id: api.ports[obj_id], should_poll=False, supported_fn=lambda controller, obj_id: controller.api.ports[obj_id].port_poe, - unique_id_fn=lambda controller, obj_id: f"{obj_id.split('_', 1)[0]}-poe-{obj_id.split('_', 1)[1]}", + unique_id_fn=lambda controller, obj_id: f"poe-{obj_id}", ), UnifiSwitchEntityDescription[Wlans, Wlan]( key="WLAN control", @@ -322,12 +323,41 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] = ( ) +@callback +def async_update_unique_id(hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Normalize switch unique ID to have a prefix rather than midfix. + + Introduced with release 2023.12. 
+ """ + controller: UniFiController = hass.data[UNIFI_DOMAIN][config_entry.entry_id] + ent_reg = er.async_get(hass) + + @callback + def update_unique_id(obj_id: str, type_name: str) -> None: + """Rework unique ID.""" + new_unique_id = f"{type_name}-{obj_id}" + if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id): + return + + prefix, _, suffix = obj_id.partition("_") + unique_id = f"{prefix}-{type_name}-{suffix}" + if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id): + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + + for obj_id in controller.api.outlets: + update_unique_id(obj_id, "outlet") + + for obj_id in controller.api.ports: + update_unique_id(obj_id, "poe") + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up switches for UniFi Network integration.""" + async_update_unique_id(hass, config_entry) UniFiController.register_platform( hass, config_entry, diff --git a/homeassistant/components/universal/manifest.json b/homeassistant/components/universal/manifest.json index 587d2c7aad2..4cf52892aaf 100644 --- a/homeassistant/components/universal/manifest.json +++ b/homeassistant/components/universal/manifest.json @@ -1,6 +1,6 @@ { "domain": "universal", - "name": "Universal Media Player", + "name": "Universal media player", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/universal", "iot_class": "calculated", diff --git a/homeassistant/components/upnp/__init__.py b/homeassistant/components/upnp/__init__.py index 326ff5d7651..6af9d85bc87 100644 --- a/homeassistant/components/upnp/__init__.py +++ b/homeassistant/components/upnp/__init__.py @@ -26,7 +26,7 @@ from .const import ( LOGGER, ) from .coordinator import UpnpDataUpdateCoordinator -from .device import async_create_device +from .device import async_create_device, get_preferred_location NOTIFICATION_ID = "upnp_notification" 
NOTIFICATION_TITLE = "UPnP/IGD Setup" @@ -57,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return nonlocal discovery_info - LOGGER.debug("Device discovered: %s, at: %s", usn, headers.ssdp_location) + LOGGER.debug("Device discovered: %s, at: %s", usn, headers.ssdp_all_locations) discovery_info = headers device_discovered_event.set() @@ -79,8 +79,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Create device. assert discovery_info is not None - assert discovery_info.ssdp_location is not None - location = discovery_info.ssdp_location + assert discovery_info.ssdp_all_locations + location = get_preferred_location(discovery_info.ssdp_all_locations) try: device = await async_create_device(hass, location) except UpnpConnectionError as err: diff --git a/homeassistant/components/upnp/config_flow.py b/homeassistant/components/upnp/config_flow.py index 35d66536375..b32273a3f24 100644 --- a/homeassistant/components/upnp/config_flow.py +++ b/homeassistant/components/upnp/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any, cast +from urllib.parse import urlparse import voluptuous as vol @@ -25,7 +26,7 @@ from .const import ( ST_IGD_V1, ST_IGD_V2, ) -from .device import async_get_mac_address_from_host +from .device import async_get_mac_address_from_host, get_preferred_location def _friendly_name_from_discovery(discovery_info: ssdp.SsdpServiceInfo) -> str: @@ -43,7 +44,7 @@ def _is_complete_discovery(discovery_info: ssdp.SsdpServiceInfo) -> bool: return bool( ssdp.ATTR_UPNP_UDN in discovery_info.upnp and discovery_info.ssdp_st - and discovery_info.ssdp_location + and discovery_info.ssdp_all_locations and discovery_info.ssdp_usn ) @@ -61,7 +62,9 @@ async def _async_mac_address_from_discovery( hass: HomeAssistant, discovery: SsdpServiceInfo ) -> str | None: """Get the mac address from a discovery.""" - host = 
discovery.ssdp_headers["_host"] + location = get_preferred_location(discovery.ssdp_all_locations) + host = urlparse(location).hostname + assert host is not None return await async_get_mac_address_from_host(hass, host) @@ -178,7 +181,9 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): # when the location changes, the entry is reloaded. updates={ CONFIG_ENTRY_MAC_ADDRESS: mac_address, - CONFIG_ENTRY_LOCATION: discovery_info.ssdp_location, + CONFIG_ENTRY_LOCATION: get_preferred_location( + discovery_info.ssdp_all_locations + ), CONFIG_ENTRY_HOST: host, CONFIG_ENTRY_ST: discovery_info.ssdp_st, }, @@ -249,7 +254,7 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_ORIGINAL_UDN: discovery.upnp[ssdp.ATTR_UPNP_UDN], CONFIG_ENTRY_MAC_ADDRESS: mac_address, CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], - CONFIG_ENTRY_LOCATION: discovery.ssdp_location, + CONFIG_ENTRY_LOCATION: get_preferred_location(discovery.ssdp_all_locations), } await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) @@ -271,7 +276,7 @@ class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_UDN: discovery.upnp[ssdp.ATTR_UPNP_UDN], CONFIG_ENTRY_ST: discovery.ssdp_st, CONFIG_ENTRY_ORIGINAL_UDN: discovery.upnp[ssdp.ATTR_UPNP_UDN], - CONFIG_ENTRY_LOCATION: discovery.ssdp_location, + CONFIG_ENTRY_LOCATION: get_preferred_location(discovery.ssdp_all_locations), CONFIG_ENTRY_MAC_ADDRESS: mac_address, CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], } diff --git a/homeassistant/components/upnp/device.py b/homeassistant/components/upnp/device.py index b62edbf9bc2..93f551bea37 100644 --- a/homeassistant/components/upnp/device.py +++ b/homeassistant/components/upnp/device.py @@ -33,6 +33,22 @@ from .const import ( ) +def get_preferred_location(locations: set[str]) -> str: + """Get the preferred location (an IPv4 location) from a set of locations.""" + # Prefer IPv4 over IPv6. 
+ for location in locations: + if location.startswith("http://[") or location.startswith("https://["): + continue + + return location + + # Fallback to any. + for location in locations: + return location + + raise ValueError("No location found") + + async def async_get_mac_address_from_host(hass: HomeAssistant, host: str) -> str | None: """Get mac address from host.""" ip_addr = ip_address(host) @@ -47,13 +63,13 @@ async def async_get_mac_address_from_host(hass: HomeAssistant, host: str) -> str return mac_address -async def async_create_device(hass: HomeAssistant, ssdp_location: str) -> Device: +async def async_create_device(hass: HomeAssistant, location: str) -> Device: """Create UPnP/IGD device.""" session = async_get_clientsession(hass, verify_ssl=False) requester = AiohttpSessionRequester(session, with_sleep=True, timeout=20) factory = UpnpFactory(requester, non_strict=True) - upnp_device = await factory.async_create_device(ssdp_location) + upnp_device = await factory.async_create_device(location) # Create profile wrapper. 
igd_device = IgdDevice(upnp_device, None) @@ -119,8 +135,7 @@ class Device: @property def host(self) -> str | None: """Get the hostname.""" - url = self._igd_device.device.device_url - parsed = urlparse(url) + parsed = urlparse(self.device_url) return parsed.hostname @property diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index 2f420096c74..e58c3ebd88d 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -11,11 +11,7 @@ from vallox_websocket_api import ( ValloxInvalidInputException, ) -from homeassistant.components.fan import ( - FanEntity, - FanEntityFeature, - NotValidPresetModeError, -) +from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -200,12 +196,6 @@ class ValloxFanEntity(ValloxEntity, FanEntity): Returns true if the mode has been changed, false otherwise. 
""" - try: - self._valid_preset_mode_or_raise(preset_mode) - - except NotValidPresetModeError as err: - raise ValueError(f"Not valid preset mode: {preset_mode}") from err - if preset_mode == self.preset_mode: return False diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index b2b1cb31624..c23c1d5924e 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -119,9 +119,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Handle Memo Text service call.""" memo_text = call.data[CONF_MEMO_TEXT] memo_text.hass = hass - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].get_module( - call.data[CONF_ADDRESS] - ).set_memo_text(memo_text.async_render()) + await ( + hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] + .get_module(call.data[CONF_ADDRESS]) + .set_memo_text(memo_text.async_render()) + ) hass.services.async_register( DOMAIN, diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index a0e5b9da52e..b2fd090e781 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -18,9 +18,21 @@ DEV_TYPE_TO_HA = { "ESWL01": "switch", "ESWL03": "switch", "ESO15-TB": "outlet", + "LV-PUR131S": "fan", + "Core200S": "fan", + "Core300S": "fan", + "Core400S": "fan", + "Core600S": "fan", + "Vital200S": "fan", + "Vital100S": "fan", + "ESD16": "walldimmer", + "ESWD16": "walldimmer", + "ESL100": "bulb-dimmable", + "ESL100CW": "bulb-tunable-white", } SKU_TO_BASE_DEVICE = { + # Air Purifiers "LV-PUR131S": "LV-PUR131S", "LV-RH131S": "LV-PUR131S", # Alt ID Model LV-PUR131S "Core200S": "Core200S", diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 326e7daf12c..22983054dc9 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -17,20 +17,10 @@ from 
homeassistant.util.percentage import ( ) from .common import VeSyncDevice -from .const import DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_FANS +from .const import DEV_TYPE_TO_HA, DOMAIN, SKU_TO_BASE_DEVICE, VS_DISCOVERY, VS_FANS _LOGGER = logging.getLogger(__name__) -DEV_TYPE_TO_HA = { - "LV-PUR131S": "fan", - "Core200S": "fan", - "Core300S": "fan", - "Core400S": "fan", - "Core600S": "fan", - "Vital200S": "fan", - "Vital100S": "fan", -} - FAN_MODE_AUTO = "auto" FAN_MODE_SLEEP = "sleep" FAN_MODE_PET = "pet" diff --git a/homeassistant/components/vesync/light.py b/homeassistant/components/vesync/light.py index e6cc979e808..040e9d5696d 100644 --- a/homeassistant/components/vesync/light.py +++ b/homeassistant/components/vesync/light.py @@ -14,17 +14,10 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .common import VeSyncDevice -from .const import DOMAIN, VS_DISCOVERY, VS_LIGHTS +from .const import DEV_TYPE_TO_HA, DOMAIN, VS_DISCOVERY, VS_LIGHTS _LOGGER = logging.getLogger(__name__) -DEV_TYPE_TO_HA = { - "ESD16": "walldimmer", - "ESWD16": "walldimmer", - "ESL100": "bulb-dimmable", - "ESL100CW": "bulb-tunable-white", -} - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/vesync/manifest.json b/homeassistant/components/vesync/manifest.json index fb892acfd4f..ff3f56dd184 100644 --- a/homeassistant/components/vesync/manifest.json +++ b/homeassistant/components/vesync/manifest.json @@ -1,7 +1,7 @@ { "domain": "vesync", "name": "VeSync", - "codeowners": ["@markperdue", "@webdjoe", "@thegardenmonkey"], + "codeowners": ["@markperdue", "@webdjoe", "@thegardenmonkey", "@cdnninja"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vesync", "iot_class": "cloud_polling", diff --git a/homeassistant/components/vesync/sensor.py b/homeassistant/components/vesync/sensor.py index f3612c2d011..4277460c3ea 100644 --- 
a/homeassistant/components/vesync/sensor.py +++ b/homeassistant/components/vesync/sensor.py @@ -48,12 +48,12 @@ class VeSyncSensorEntityDescription( ): """Describe VeSync sensor entity.""" - exists_fn: Callable[ - [VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool - ] = lambda _: True - update_fn: Callable[ - [VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None - ] = lambda _: None + exists_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool] = ( + lambda _: True + ) + update_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None] = ( + lambda _: None + ) def update_energy(device): diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 4e3d8d05f97..525099e7d4e 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -5,7 +5,11 @@ from contextlib import suppress from dataclasses import dataclass import logging +from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, +) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, PyViCareNotSupportedFeatureError, @@ -25,7 +29,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import ViCareRequiredKeysMixin from .const import DOMAIN, VICARE_API, VICARE_DEVICE_CONFIG from .entity import ViCareEntity -from .utils import is_supported +from .utils import get_burners, get_circuits, get_compressors, is_supported _LOGGER = logging.getLogger(__name__) @@ -40,14 +44,14 @@ class ViCareBinarySensorEntityDescription( CIRCUIT_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] 
= ( ViCareBinarySensorEntityDescription( key="circulationpump_active", - name="Circulation pump", + translation_key="circulation_pump", icon="mdi:pump", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getCirculationPumpActive(), ), ViCareBinarySensorEntityDescription( key="frost_protection_active", - name="Frost protection", + translation_key="frost_protection", icon="mdi:snowflake", value_getter=lambda api: api.getFrostProtectionActive(), ), @@ -56,7 +60,7 @@ CIRCUIT_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = ( BURNER_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = ( ViCareBinarySensorEntityDescription( key="burner_active", - name="Burner", + translation_key="burner", icon="mdi:gas-burner", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getActive(), @@ -66,7 +70,7 @@ BURNER_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = ( COMPRESSOR_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = ( ViCareBinarySensorEntityDescription( key="compressor_active", - name="Compressor", + translation_key="compressor", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getActive(), ), @@ -75,27 +79,27 @@ COMPRESSOR_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] = ( GLOBAL_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] 
= ( ViCareBinarySensorEntityDescription( key="solar_pump_active", - name="Solar pump", + translation_key="solar_pump", icon="mdi:pump", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getSolarPumpActive(), ), ViCareBinarySensorEntityDescription( key="charging_active", - name="DHW Charging", + translation_key="domestic_hot_water_charging", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getDomesticHotWaterChargingActive(), ), ViCareBinarySensorEntityDescription( key="dhw_circulationpump_active", - name="DHW Circulation Pump", + translation_key="domestic_hot_water_circulation_pump", icon="mdi:pump", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getDomesticHotWaterCirculationPumpActive(), ), ViCareBinarySensorEntityDescription( key="dhw_pump_active", - name="DHW Pump", + translation_key="domestic_hot_water_pump", icon="mdi:pump", device_class=BinarySensorDeviceClass.RUNNING, value_getter=lambda api: api.getDomesticHotWaterPumpActive(), @@ -103,45 +107,67 @@ GLOBAL_SENSORS: tuple[ViCareBinarySensorEntityDescription, ...] 
= ( ) -def _build_entity( - name: str, - vicare_api, +def _build_entities( + device: PyViCareDevice, device_config: PyViCareDeviceConfig, - entity_description: ViCareBinarySensorEntityDescription, -): - """Create a ViCare binary sensor entity.""" - if is_supported(name, entity_description, vicare_api): - return ViCareBinarySensor( - name, - vicare_api, - device_config, - entity_description, +) -> list[ViCareBinarySensor]: + """Create ViCare binary sensor entities for a device.""" + + entities: list[ViCareBinarySensor] = _build_entities_for_device( + device, device_config + ) + entities.extend( + _build_entities_for_component( + get_circuits(device), device_config, CIRCUIT_SENSORS ) - return None + ) + entities.extend( + _build_entities_for_component( + get_burners(device), device_config, BURNER_SENSORS + ) + ) + entities.extend( + _build_entities_for_component( + get_compressors(device), device_config, COMPRESSOR_SENSORS + ) + ) + return entities -async def _entities_from_descriptions( - hass: HomeAssistant, - entities: list[ViCareBinarySensor], - sensor_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], - iterables, - config_entry: ConfigEntry, -) -> None: - """Create entities from descriptions and list of burners/circuits.""" - for description in sensor_descriptions: - for current in iterables: - suffix = "" - if len(iterables) > 1: - suffix = f" {current.id}" - entity = await hass.async_add_executor_job( - _build_entity, - f"{description.name}{suffix}", - current, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], - description, - ) - if entity is not None: - entities.append(entity) +def _build_entities_for_device( + device: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareBinarySensor]: + """Create device specific ViCare binary sensor entities.""" + + return [ + ViCareBinarySensor( + device, + device_config, + description, + ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, 
device) + ] + + +def _build_entities_for_component( + components: list[PyViCareHeatingDeviceWithComponent], + device_config: PyViCareDeviceConfig, + entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], +) -> list[ViCareBinarySensor]: + """Create component specific ViCare binary sensor entities.""" + + return [ + ViCareBinarySensor( + component, + device_config, + description, + ) + for component in components + for description in entity_descriptions + if is_supported(description.key, description, component) + ] async def async_setup_entry( @@ -151,42 +177,15 @@ async def async_setup_entry( ) -> None: """Create the ViCare binary sensor devices.""" api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] + device_config = hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG] - entities = [] - - for description in GLOBAL_SENSORS: - entity = await hass.async_add_executor_job( - _build_entity, - description.name, + async_add_entities( + await hass.async_add_executor_job( + _build_entities, api, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], - description, + device_config, ) - if entity is not None: - entities.append(entity) - - try: - await _entities_from_descriptions( - hass, entities, CIRCUIT_SENSORS, api.circuits, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No circuits found") - - try: - await _entities_from_descriptions( - hass, entities, BURNER_SENSORS, api.burners, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No burners found") - - try: - await _entities_from_descriptions( - hass, entities, COMPRESSOR_SENSORS, api.compressors, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No compressors found") - - async_add_entities(entities) + ) class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): @@ -195,31 +194,21 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): entity_description: ViCareBinarySensorEntityDescription 
def __init__( - self, name, api, device_config, description: ViCareBinarySensorEntityDescription + self, + api: PyViCareDevice, + device_config: PyViCareDeviceConfig, + description: ViCareBinarySensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__(device_config) + super().__init__(device_config, api, description.key) self.entity_description = description - self._attr_name = name - self._api = api - self._device_config = device_config @property - def available(self): + def available(self) -> bool: """Return True if entity is available.""" return self._attr_is_on is not None - @property - def unique_id(self) -> str: - """Return unique ID for this device.""" - tmp_id = ( - f"{self._device_config.getConfig().serial}-{self.entity_description.key}" - ) - if hasattr(self._api, "id"): - return f"{tmp_id}-{self._api.id}" - return tmp_id - - def update(self): + def update(self) -> None: """Update state of sensor.""" try: with suppress(PyViCareNotSupportedFeatureError): diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index 2516446a94e..374d98b3397 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -5,6 +5,7 @@ from contextlib import suppress from dataclasses import dataclass import logging +from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -26,8 +27,6 @@ from .utils import is_supported _LOGGER = logging.getLogger(__name__) -BUTTON_DHW_ACTIVATE_ONETIME_CHARGE = "activate_onetimecharge" - @dataclass class ViCareButtonEntityDescription( @@ -38,8 +37,8 @@ class ViCareButtonEntityDescription( BUTTON_DESCRIPTIONS: tuple[ViCareButtonEntityDescription, ...] 
= ( ViCareButtonEntityDescription( - key=BUTTON_DHW_ACTIVATE_ONETIME_CHARGE, - name="Activate one-time charge", + key="activate_onetimecharge", + translation_key="activate_onetimecharge", icon="mdi:shower-head", entity_category=EntityCategory.CONFIG, value_getter=lambda api: api.getOneTimeCharge(), @@ -48,22 +47,21 @@ BUTTON_DESCRIPTIONS: tuple[ViCareButtonEntityDescription, ...] = ( ) -def _build_entity( - name: str, - vicare_api, +def _build_entities( + api: PyViCareDevice, device_config: PyViCareDeviceConfig, - entity_description: ViCareButtonEntityDescription, -): - """Create a ViCare button entity.""" - _LOGGER.debug("Found device %s", name) - if is_supported(name, entity_description, vicare_api): - return ViCareButton( - name, - vicare_api, +) -> list[ViCareButton]: + """Create ViCare button entities for a device.""" + + return [ + ViCareButton( + api, device_config, - entity_description, + description, ) - return None + for description in BUTTON_DESCRIPTIONS + if is_supported(description.key, description, api) + ] async def async_setup_entry( @@ -73,21 +71,15 @@ async def async_setup_entry( ) -> None: """Create the ViCare button entities.""" api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] + device_config = hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG] - entities = [] - - for description in BUTTON_DESCRIPTIONS: - entity = await hass.async_add_executor_job( - _build_entity, - description.name, + async_add_entities( + await hass.async_add_executor_job( + _build_entities, api, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], - description, + device_config, ) - if entity is not None: - entities.append(entity) - - async_add_entities(entities) + ) class ViCareButton(ViCareEntity, ButtonEntity): @@ -96,13 +88,14 @@ class ViCareButton(ViCareEntity, ButtonEntity): entity_description: ViCareButtonEntityDescription def __init__( - self, name, api, device_config, description: ViCareButtonEntityDescription + self, + api: 
PyViCareDevice, + device_config: PyViCareDeviceConfig, + description: ViCareButtonEntityDescription, ) -> None: """Initialize the button.""" - super().__init__(device_config) + super().__init__(device_config, api, description.key) self.entity_description = description - self._device_config = device_config - self._api = api def press(self) -> None: """Handle the button press.""" @@ -117,13 +110,3 @@ class ViCareButton(ViCareEntity, ButtonEntity): _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception) except PyViCareInvalidDataError as invalid_data_exception: _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) - - @property - def unique_id(self) -> str: - """Return unique ID for this device.""" - tmp_id = ( - f"{self._device_config.getConfig().serial}-{self.entity_description.key}" - ) - if hasattr(self._api, "id"): - return f"{tmp_id}-{self._api.id}" - return tmp_id diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index d306cc6604d..c14f940ffe6 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -5,6 +5,9 @@ from contextlib import suppress import logging from typing import Any +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import HeatingCircuit as PyViCareHeatingCircuit from PyViCare.PyViCareUtils import ( PyViCareCommandError, PyViCareInvalidDataError, @@ -31,12 +34,14 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, VICARE_API, VICARE_DEVICE_CONFIG from .entity import ViCareEntity +from .utils 
import get_burners, get_circuits, get_compressors _LOGGER = logging.getLogger(__name__) @@ -90,13 +95,20 @@ HA_TO_VICARE_PRESET_HEATING = { } -def _get_circuits(vicare_api): - """Return the list of circuits.""" - try: - return vicare_api.circuits - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No circuits found") - return [] +def _build_entities( + api: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareClimate]: + """Create ViCare climate entities for a device.""" + return [ + ViCareClimate( + api, + circuit, + device_config, + "heating", + ) + for circuit in get_circuits(api) + ] async def async_setup_entry( @@ -105,22 +117,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare climate platform.""" - entities = [] api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] - circuits = await hass.async_add_executor_job(_get_circuits, api) - - for circuit in circuits: - suffix = "" - if len(circuits) > 1: - suffix = f" {circuit.id}" - - entity = ViCareClimate( - f"Heating{suffix}", - api, - circuit, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], - ) - entities.append(entity) + device_config = hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG] platform = entity_platform.async_get_current_platform() @@ -130,7 +128,13 @@ async def async_setup_entry( "set_vicare_mode", ) - async_add_entities(entities) + async_add_entities( + await hass.async_add_executor_job( + _build_entities, + api, + device_config, + ) + ) class ViCareClimate(ViCareEntity, ClimateEntity): @@ -148,15 +152,19 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _current_action: bool | None = None _current_mode: str | None = None - def __init__(self, name, api, circuit, device_config) -> None: + def __init__( + self, + api: PyViCareDevice, + circuit: PyViCareHeatingCircuit, + device_config: PyViCareDeviceConfig, + translation_key: str, + ) -> None: """Initialize the climate device.""" - 
super().__init__(device_config) - self._attr_name = name - self._api = api + super().__init__(device_config, api, circuit.id) self._circuit = circuit self._attributes: dict[str, Any] = {} self._current_program = None - self._attr_unique_id = f"{device_config.getConfig().serial}-{circuit.id}" + self._attr_translation_key = translation_key def update(self) -> None: """Let HA know there has been an update from the ViCare API.""" @@ -209,11 +217,11 @@ class ViCareClimate(ViCareEntity, ClimateEntity): self._current_action = False # Update the specific device attributes with suppress(PyViCareNotSupportedFeatureError): - for burner in self._api.burners: + for burner in get_burners(self._api): self._current_action = self._current_action or burner.getActive() with suppress(PyViCareNotSupportedFeatureError): - for compressor in self._api.compressors: + for compressor in get_compressors(self._api): self._current_action = ( self._current_action or compressor.getActive() ) @@ -292,22 +300,45 @@ class ViCareClimate(ViCareEntity, ClimateEntity): def set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode and deactivate any existing programs.""" - vicare_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode) - if vicare_program is None: - raise ValueError( - f"Cannot set invalid vicare program: {preset_mode}/{vicare_program}" + target_program = HA_TO_VICARE_PRESET_HEATING.get(preset_mode) + if target_program is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="program_unknown", + translation_placeholders={ + "preset": preset_mode, + }, ) - _LOGGER.debug("Setting preset to %s / %s", preset_mode, vicare_program) - if self._current_program != VICARE_PROGRAM_NORMAL: + _LOGGER.debug("Current preset %s", self._current_program) + if self._current_program and self._current_program != VICARE_PROGRAM_NORMAL: # We can't deactivate "normal" + _LOGGER.debug("deactivating %s", self._current_program) try: 
self._circuit.deactivateProgram(self._current_program) - except PyViCareCommandError: - _LOGGER.debug("Unable to deactivate program %s", self._current_program) - if vicare_program != VICARE_PROGRAM_NORMAL: - # And we can't explicitly activate normal, either - self._circuit.activateProgram(vicare_program) + except PyViCareCommandError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="program_not_deactivated", + translation_placeholders={ + "program": self._current_program, + }, + ) from err + + _LOGGER.debug("Setting preset to %s / %s", preset_mode, target_program) + if target_program != VICARE_PROGRAM_NORMAL: + # And we can't explicitly activate "normal", either + _LOGGER.debug("activating %s", target_program) + try: + self._circuit.activateProgram(target_program) + except PyViCareCommandError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="program_not_activated", + translation_placeholders={ + "program": target_program, + }, + ) from err @property def extra_state_attributes(self): diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 546f18985e8..3ed81ab587a 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -6,10 +6,11 @@ from homeassistant.const import Platform, UnitOfEnergy, UnitOfVolume DOMAIN = "vicare" PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, + Platform.NUMBER, Platform.SENSOR, - Platform.BINARY_SENSOR, Platform.WATER_HEATER, ] diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index 089f9c062b8..af35c7bf8dd 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -1,4 +1,7 @@ """Entities for the ViCare integration.""" +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig + from 
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity @@ -10,8 +13,19 @@ class ViCareEntity(Entity): _attr_has_entity_name = True - def __init__(self, device_config) -> None: + def __init__( + self, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + unique_id_suffix: str, + ) -> None: """Initialize the entity.""" + self._api = device + + self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" + # valid for compressors, circuits, burners (HeatingDeviceWithComponent) + if hasattr(device, "id"): + self._attr_unique_id += f"-{device.id}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device_config.getConfig().serial)}, diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index d71ccdbb12c..cbde6242082 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.28.1"] + "requirements": ["PyViCare==2.29.0"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py new file mode 100644 index 00000000000..5511f2a5294 --- /dev/null +++ b/homeassistant/components/vicare/number.py @@ -0,0 +1,180 @@ +"""Number for ViCare.""" +from __future__ import annotations + +from collections.abc import Callable +from contextlib import suppress +from dataclasses import dataclass +import logging +from typing import Any + +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, +) +from PyViCare.PyViCareUtils import ( + PyViCareInvalidDataError, + PyViCareNotSupportedFeatureError, + 
PyViCareRateLimitError, +) +from requests.exceptions import ConnectionError as RequestConnectionError + +from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ViCareRequiredKeysMixin +from .const import DOMAIN, VICARE_API, VICARE_DEVICE_CONFIG +from .entity import ViCareEntity +from .utils import get_circuits, is_supported + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class ViCareNumberEntityDescription(NumberEntityDescription, ViCareRequiredKeysMixin): + """Describes ViCare number entity.""" + + value_setter: Callable[[PyViCareDevice, float], Any] | None = None + min_value_getter: Callable[[PyViCareDevice], float | None] | None = None + max_value_getter: Callable[[PyViCareDevice], float | None] | None = None + stepping_getter: Callable[[PyViCareDevice], float | None] | None = None + + +CIRCUIT_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] 
= ( + ViCareNumberEntityDescription( + key="heating curve shift", + translation_key="heating_curve_shift", + icon="mdi:plus-minus-variant", + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getHeatingCurveShift(), + value_setter=lambda api, shift: ( + api.setHeatingCurve(shift, api.getHeatingCurveSlope()) + ), + min_value_getter=lambda api: api.getHeatingCurveShiftMin(), + max_value_getter=lambda api: api.getHeatingCurveShiftMax(), + stepping_getter=lambda api: api.getHeatingCurveShiftStepping(), + native_min_value=-13, + native_max_value=40, + native_step=1, + ), + ViCareNumberEntityDescription( + key="heating curve slope", + translation_key="heating_curve_slope", + icon="mdi:slope-uphill", + entity_category=EntityCategory.CONFIG, + value_getter=lambda api: api.getHeatingCurveSlope(), + value_setter=lambda api, slope: ( + api.setHeatingCurve(api.getHeatingCurveShift(), slope) + ), + min_value_getter=lambda api: api.getHeatingCurveSlopeMin(), + max_value_getter=lambda api: api.getHeatingCurveSlopeMax(), + stepping_getter=lambda api: api.getHeatingCurveSlopeStepping(), + native_min_value=0.2, + native_max_value=3.5, + native_step=0.1, + ), +) + + +def _build_entities( + api: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareNumber]: + """Create ViCare number entities for a component.""" + + return [ + ViCareNumber( + circuit, + device_config, + description, + ) + for circuit in get_circuits(api) + for description in CIRCUIT_ENTITY_DESCRIPTIONS + if is_supported(description.key, description, circuit) + ] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Create the ViCare number devices.""" + api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] + device_config = hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG] + + async_add_entities( + await 
hass.async_add_executor_job( + _build_entities, + api, + device_config, + ) + ) + + +class ViCareNumber(ViCareEntity, NumberEntity): + """Representation of a ViCare number.""" + + entity_description: ViCareNumberEntityDescription + + def __init__( + self, + api: PyViCareHeatingDeviceComponent, + device_config: PyViCareDeviceConfig, + description: ViCareNumberEntityDescription, + ) -> None: + """Initialize the number.""" + super().__init__(device_config, api, description.key) + self.entity_description = description + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self._attr_native_value is not None + + def set_native_value(self, value: float) -> None: + """Set new value.""" + if self.entity_description.value_setter: + self.entity_description.value_setter(self._api, value) + self.schedule_update_ha_state() + + def update(self) -> None: + """Update state of number.""" + try: + with suppress(PyViCareNotSupportedFeatureError): + self._attr_native_value = self.entity_description.value_getter( + self._api + ) + if min_value := _get_value( + self.entity_description.min_value_getter, self._api + ): + self._attr_native_min_value = min_value + + if max_value := _get_value( + self.entity_description.max_value_getter, self._api + ): + self._attr_native_max_value = max_value + + if stepping_value := _get_value( + self.entity_description.stepping_getter, self._api + ): + self._attr_native_step = stepping_value + except RequestConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") + except PyViCareRateLimitError as limit_exception: + _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception) + except PyViCareInvalidDataError as invalid_data_exception: + _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) + + +def _get_value( + fn: Callable[[PyViCareDevice], float | None] | None, + api: 
PyViCareHeatingDeviceComponent, +) -> float | None: + return None if fn is None else fn(api) diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 325f3bf2d07..875d8790c52 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -6,8 +6,11 @@ from contextlib import suppress from dataclasses import dataclass import logging -from PyViCare.PyViCareDevice import Device +from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, +) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, PyViCareNotSupportedFeatureError, @@ -24,11 +27,13 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, + EntityCategory, UnitOfEnergy, UnitOfPower, UnitOfTemperature, UnitOfTime, UnitOfVolume, + UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -43,7 +48,7 @@ from .const import ( VICARE_UNIT_TO_UNIT_OF_MEASUREMENT, ) from .entity import ViCareEntity -from .utils import is_supported +from .utils import get_burners, get_circuits, get_compressors, is_supported _LOGGER = logging.getLogger(__name__) @@ -57,13 +62,13 @@ VICARE_UNIT_TO_DEVICE_CLASS = { class ViCareSensorEntityDescription(SensorEntityDescription, ViCareRequiredKeysMixin): """Describes ViCare sensor entity.""" - unit_getter: Callable[[Device], str | None] | None = None + unit_getter: Callable[[PyViCareDevice], str | None] | None = None GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="outside_temperature", - name="Outside Temperature", + translation_key="outside_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getOutsideTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -71,7 +76,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="return_temperature", - name="Return Temperature", + translation_key="return_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getReturnTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -79,7 +84,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="boiler_temperature", - name="Boiler Temperature", + translation_key="boiler_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getBoilerTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -87,7 +92,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="boiler_supply_temperature", - name="Boiler Supply Temperature", + translation_key="boiler_supply_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getBoilerCommonSupplyTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -95,7 +100,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="primary_circuit_supply_temperature", - name="Primary Circuit Supply Temperature", + translation_key="primary_circuit_supply_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getSupplyTemperaturePrimaryCircuit(), device_class=SensorDeviceClass.TEMPERATURE, @@ -103,7 +108,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="primary_circuit_return_temperature", - name="Primary Circuit Return Temperature", + translation_key="primary_circuit_return_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getReturnTemperaturePrimaryCircuit(), device_class=SensorDeviceClass.TEMPERATURE, @@ -111,7 +116,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="secondary_circuit_supply_temperature", - name="Secondary Circuit Supply Temperature", + translation_key="secondary_circuit_supply_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getSupplyTemperatureSecondaryCircuit(), device_class=SensorDeviceClass.TEMPERATURE, @@ -119,7 +124,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="secondary_circuit_return_temperature", - name="Secondary Circuit Return Temperature", + translation_key="secondary_circuit_return_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getReturnTemperatureSecondaryCircuit(), device_class=SensorDeviceClass.TEMPERATURE, @@ -127,7 +132,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="hotwater_out_temperature", - name="Hot Water Out Temperature", + translation_key="hotwater_out_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getDomesticHotWaterOutletTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -135,7 +140,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="hotwater_max_temperature", - name="Hot Water Max Temperature", + translation_key="hotwater_max_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getDomesticHotWaterMaxTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -143,7 +148,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="hotwater_min_temperature", - name="Hot Water Min Temperature", + translation_key="hotwater_min_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getDomesticHotWaterMinTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -151,63 +156,63 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="hotwater_gas_consumption_today", - name="Hot water gas consumption today", + translation_key="hotwater_gas_consumption_today", value_getter=lambda api: api.getGasConsumptionDomesticHotWaterToday(), unit_getter=lambda api: api.getGasConsumptionDomesticHotWaterUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="hotwater_gas_consumption_heating_this_week", - name="Hot water gas consumption this week", + translation_key="hotwater_gas_consumption_heating_this_week", value_getter=lambda api: api.getGasConsumptionDomesticHotWaterThisWeek(), unit_getter=lambda api: api.getGasConsumptionDomesticHotWaterUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="hotwater_gas_consumption_heating_this_month", - name="Hot water gas consumption this month", + translation_key="hotwater_gas_consumption_heating_this_month", value_getter=lambda api: api.getGasConsumptionDomesticHotWaterThisMonth(), unit_getter=lambda api: api.getGasConsumptionDomesticHotWaterUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( 
key="hotwater_gas_consumption_heating_this_year", - name="Hot water gas consumption this year", + translation_key="hotwater_gas_consumption_heating_this_year", value_getter=lambda api: api.getGasConsumptionDomesticHotWaterThisYear(), unit_getter=lambda api: api.getGasConsumptionDomesticHotWaterUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="gas_consumption_heating_today", - name="Heating gas consumption today", + translation_key="gas_consumption_heating_today", value_getter=lambda api: api.getGasConsumptionHeatingToday(), unit_getter=lambda api: api.getGasConsumptionHeatingUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="gas_consumption_heating_this_week", - name="Heating gas consumption this week", + translation_key="gas_consumption_heating_this_week", value_getter=lambda api: api.getGasConsumptionHeatingThisWeek(), unit_getter=lambda api: api.getGasConsumptionHeatingUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="gas_consumption_heating_this_month", - name="Heating gas consumption this month", + translation_key="gas_consumption_heating_this_month", value_getter=lambda api: api.getGasConsumptionHeatingThisMonth(), unit_getter=lambda api: api.getGasConsumptionHeatingUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="gas_consumption_heating_this_year", - name="Heating gas consumption this year", + translation_key="gas_consumption_heating_this_year", value_getter=lambda api: api.getGasConsumptionHeatingThisYear(), unit_getter=lambda api: api.getGasConsumptionHeatingUnit(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="gas_summary_consumption_heating_currentday", - name="Heating gas consumption current day", + translation_key="gas_summary_consumption_heating_currentday", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: 
api.getGasSummaryConsumptionHeatingCurrentDay(), unit_getter=lambda api: api.getGasSummaryConsumptionHeatingUnit(), @@ -215,7 +220,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="gas_summary_consumption_heating_currentmonth", - name="Heating gas consumption current month", + translation_key="gas_summary_consumption_heating_currentmonth", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionHeatingCurrentMonth(), unit_getter=lambda api: api.getGasSummaryConsumptionHeatingUnit(), @@ -223,7 +228,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="gas_summary_consumption_heating_currentyear", - name="Heating gas consumption current year", + translation_key="gas_summary_consumption_heating_currentyear", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionHeatingCurrentYear(), unit_getter=lambda api: api.getGasSummaryConsumptionHeatingUnit(), @@ -231,7 +236,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="gas_summary_consumption_heating_lastsevendays", - name="Heating gas consumption last seven days", + translation_key="gas_summary_consumption_heating_lastsevendays", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionHeatingLastSevenDays(), unit_getter=lambda api: api.getGasSummaryConsumptionHeatingUnit(), @@ -239,7 +244,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="hotwater_gas_summary_consumption_heating_currentday", - name="Hot water gas consumption current day", + translation_key="hotwater_gas_summary_consumption_heating_currentday", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterCurrentDay(), unit_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterUnit(), @@ -247,7 +252,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="hotwater_gas_summary_consumption_heating_currentmonth", - name="Hot water gas consumption current month", + translation_key="hotwater_gas_summary_consumption_heating_currentmonth", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterCurrentMonth(), unit_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterUnit(), @@ -255,7 +260,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="hotwater_gas_summary_consumption_heating_currentyear", - name="Hot water gas consumption current year", + translation_key="hotwater_gas_summary_consumption_heating_currentyear", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterCurrentYear(), unit_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterUnit(), @@ -263,7 +268,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="hotwater_gas_summary_consumption_heating_lastsevendays", - name="Hot water gas consumption last seven days", + translation_key="hotwater_gas_summary_consumption_heating_lastsevendays", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, value_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterLastSevenDays(), unit_getter=lambda api: api.getGasSummaryConsumptionDomesticHotWaterUnit(), @@ -271,7 +276,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_summary_consumption_heating_currentday", - name="Energy consumption of gas heating current day", + translation_key="energy_summary_consumption_heating_currentday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionHeatingCurrentDay(), unit_getter=lambda api: api.getPowerSummaryConsumptionHeatingUnit(), @@ -279,7 +284,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_summary_consumption_heating_currentmonth", - name="Energy consumption of gas heating current month", + translation_key="energy_summary_consumption_heating_currentmonth", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionHeatingCurrentMonth(), unit_getter=lambda api: api.getPowerSummaryConsumptionHeatingUnit(), @@ -287,7 +292,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="energy_summary_consumption_heating_currentyear", - name="Energy consumption of gas heating current year", + translation_key="energy_summary_consumption_heating_currentyear", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionHeatingCurrentYear(), unit_getter=lambda api: api.getPowerSummaryConsumptionHeatingUnit(), @@ -295,7 +300,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_summary_consumption_heating_lastsevendays", - name="Energy consumption of gas heating last seven days", + translation_key="energy_summary_consumption_heating_lastsevendays", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionHeatingLastSevenDays(), unit_getter=lambda api: api.getPowerSummaryConsumptionHeatingUnit(), @@ -303,7 +308,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentday", - name="Energy consumption of hot water gas heating current day", + translation_key="energy_dhw_summary_consumption_heating_currentday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterCurrentDay(), unit_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterUnit(), @@ -311,7 +316,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentmonth", - name="Energy consumption of hot water gas heating current month", + translation_key="energy_dhw_summary_consumption_heating_currentmonth", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterCurrentMonth(), unit_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterUnit(), @@ -319,7 +324,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_dhw_summary_consumption_heating_currentyear", - name="Energy consumption of hot water gas heating current year", + translation_key="energy_dhw_summary_consumption_heating_currentyear", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterCurrentYear(), unit_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterUnit(), @@ -327,7 +332,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="energy_summary_dhw_consumption_heating_lastsevendays", - name="Energy consumption of hot water gas heating last seven days", + translation_key="energy_summary_dhw_consumption_heating_lastsevendays", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterLastSevenDays(), unit_getter=lambda api: api.getPowerSummaryConsumptionDomesticHotWaterUnit(), @@ -335,7 +340,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power_production_current", - name="Power production current", + translation_key="power_production_current", native_unit_of_measurement=UnitOfPower.WATT, value_getter=lambda api: api.getPowerProductionCurrent(), device_class=SensorDeviceClass.POWER, @@ -343,7 +348,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="power_production_today", - name="Energy production today", + translation_key="power_production_today", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerProductionToday(), device_class=SensorDeviceClass.ENERGY, @@ -351,7 +356,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power_production_this_week", - name="Energy production this week", + translation_key="power_production_this_week", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerProductionThisWeek(), device_class=SensorDeviceClass.ENERGY, @@ -359,7 +364,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power_production_this_month", - name="Energy production this month", + translation_key="power_production_this_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerProductionThisMonth(), device_class=SensorDeviceClass.ENERGY, @@ -367,7 +372,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power_production_this_year", - name="Energy production this year", + translation_key="power_production_this_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerProductionThisYear(), device_class=SensorDeviceClass.ENERGY, @@ -375,7 +380,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="solar storage temperature", - name="Solar Storage Temperature", + translation_key="solar_storage_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getSolarStorageTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -383,7 +388,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="collector temperature", - name="Solar Collector Temperature", + translation_key="collector_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getSolarCollectorTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -391,7 +396,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="solar power production today", - name="Solar energy production today", + translation_key="solar_power_production_today", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getSolarPowerProductionToday(), unit_getter=lambda api: api.getSolarPowerProductionUnit(), @@ -400,7 +405,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="solar power production this week", - name="Solar energy production this week", + translation_key="solar_power_production_this_week", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getSolarPowerProductionThisWeek(), unit_getter=lambda api: api.getSolarPowerProductionUnit(), @@ -409,7 +414,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="solar power production this month", - name="Solar energy production this month", + translation_key="solar_power_production_this_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getSolarPowerProductionThisMonth(), unit_getter=lambda api: api.getSolarPowerProductionUnit(), @@ -418,7 +423,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="solar power production this year", - name="Solar energy production this year", + translation_key="solar_power_production_this_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getSolarPowerProductionThisYear(), unit_getter=lambda api: api.getSolarPowerProductionUnit(), @@ -427,7 +432,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power consumption today", - name="Energy consumption today", + translation_key="power_consumption_today", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerConsumptionToday(), unit_getter=lambda api: api.getPowerConsumptionUnit(), @@ -436,7 +441,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power consumption this week", - name="Power consumption this week", + translation_key="power_consumption_this_week", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerConsumptionThisWeek(), unit_getter=lambda api: api.getPowerConsumptionUnit(), @@ -445,7 +450,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="power consumption this month", - name="Energy consumption this month", + translation_key="power_consumption_this_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerConsumptionThisMonth(), unit_getter=lambda api: api.getPowerConsumptionUnit(), @@ -454,7 +459,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...]
= ( ), ViCareSensorEntityDescription( key="power consumption this year", - name="Energy consumption this year", + translation_key="power_consumption_this_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_getter=lambda api: api.getPowerConsumptionThisYear(), unit_getter=lambda api: api.getPowerConsumptionUnit(), @@ -463,7 +468,7 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="buffer top temperature", - name="Buffer top temperature", + translation_key="buffer_top_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getBufferTopTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -471,18 +476,27 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="buffer main temperature", - name="Buffer main temperature", + translation_key="buffer_main_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getBufferMainTemperature(), device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), + ViCareSensorEntityDescription( + key="volumetric_flow", + translation_key="volumetric_flow", + icon="mdi:gauge", + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + value_getter=lambda api: api.getVolumetricFlowReturn() / 1000, + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + ), ) CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ViCareSensorEntityDescription( key="supply_temperature", - name="Supply Temperature", + translation_key="supply_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, value_getter=lambda api: api.getSupplyTemperature(), device_class=SensorDeviceClass.TEMPERATURE, @@ -493,14 +507,14 @@ CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( BURNER_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="burner_starts", - name="Burner Starts", + translation_key="burner_starts", icon="mdi:counter", value_getter=lambda api: api.getStarts(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="burner_hours", - name="Burner Hours", + translation_key="burner_hours", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHours(), @@ -508,7 +522,7 @@ BURNER_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="burner_modulation", - name="Burner Modulation", + translation_key="burner_modulation", icon="mdi:percent", native_unit_of_measurement=PERCENTAGE, value_getter=lambda api: api.getModulation(), @@ -519,14 +533,14 @@ BURNER_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ViCareSensorEntityDescription( key="compressor_starts", - name="Compressor Starts", + translation_key="compressor_starts", icon="mdi:counter", value_getter=lambda api: api.getStarts(), state_class=SensorStateClass.TOTAL_INCREASING, ), ViCareSensorEntityDescription( key="compressor_hours", - name="Compressor Hours", + translation_key="compressor_hours", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHours(), @@ -534,7 +548,7 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="compressor_hours_loadclass1", - name="Compressor Hours Load Class 1", + translation_key="compressor_hours_loadclass1", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHoursLoadClass1(), @@ -542,7 +556,7 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="compressor_hours_loadclass2", - name="Compressor Hours Load Class 2", + translation_key="compressor_hours_loadclass2", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHoursLoadClass2(), @@ -550,7 +564,7 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="compressor_hours_loadclass3", - name="Compressor Hours Load Class 3", + translation_key="compressor_hours_loadclass3", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHoursLoadClass3(), @@ -558,7 +572,7 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ViCareSensorEntityDescription( key="compressor_hours_loadclass4", - name="Compressor Hours Load Class 4", + translation_key="compressor_hours_loadclass4", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHoursLoadClass4(), @@ -566,26 +580,30 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ), ViCareSensorEntityDescription( key="compressor_hours_loadclass5", - name="Compressor Hours Load Class 5", + translation_key="compressor_hours_loadclass5", icon="mdi:counter", native_unit_of_measurement=UnitOfTime.HOURS, value_getter=lambda api: api.getHoursLoadClass5(), state_class=SensorStateClass.TOTAL_INCREASING, ), + ViCareSensorEntityDescription( + key="compressor_phase", + translation_key="compressor_phase", + icon="mdi:information", + value_getter=lambda api: api.getPhase(), + entity_category=EntityCategory.DIAGNOSTIC, + ), ) def _build_entity( - name: str, vicare_api, device_config: PyViCareDeviceConfig, entity_description: ViCareSensorEntityDescription, ): """Create a ViCare sensor entity.""" - _LOGGER.debug("Found device %s", name) - if is_supported(name, entity_description, vicare_api): + if is_supported(entity_description.key, entity_description, vicare_api): return ViCareSensor( - name, vicare_api, device_config, entity_description, @@ -603,62 +621,95 @@ async def _entities_from_descriptions( """Create entities from descriptions and list of burners/circuits.""" for description in sensor_descriptions: for current in iterables: - suffix = "" - if len(iterables) > 1: - suffix = f" {current.id}" entity = await hass.async_add_executor_job( _build_entity, - f"{description.name}{suffix}", current, hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], description, ) - if entity is not None: + if entity: entities.append(entity) +def _build_entities( + device: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareSensor]: + """Create ViCare sensor entities for a device.""" + + entities: list[ViCareSensor] = _build_entities_for_device(device, device_config) + entities.extend( + _build_entities_for_component( + get_circuits(device), device_config, CIRCUIT_SENSORS + ) + ) + entities.extend( + _build_entities_for_component( + get_burners(device), device_config, BURNER_SENSORS + ) + ) + entities.extend( + 
_build_entities_for_component( + get_compressors(device), device_config, COMPRESSOR_SENSORS + ) + ) + return entities + + +def _build_entities_for_device( + device: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareSensor]: + """Create device specific ViCare sensor entities.""" + + return [ + ViCareSensor( + device, + device_config, + description, + ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device) + ] + + +def _build_entities_for_component( + components: list[PyViCareHeatingDeviceWithComponent], + device_config: PyViCareDeviceConfig, + entity_descriptions: tuple[ViCareSensorEntityDescription, ...], +) -> list[ViCareSensor]: + """Create component specific ViCare sensor entities.""" + + return [ + ViCareSensor( + component, + device_config, + description, + ) + for component in components + for description in entity_descriptions + if is_supported(description.key, description, component) + ] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare sensor devices.""" - api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] + api: PyViCareDevice = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] + device_config: PyViCareDeviceConfig = hass.data[DOMAIN][config_entry.entry_id][ + VICARE_DEVICE_CONFIG + ] - entities = [] - for description in GLOBAL_SENSORS: - entity = await hass.async_add_executor_job( - _build_entity, - description.name, + async_add_entities( + await hass.async_add_executor_job( + _build_entities, api, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], - description, + device_config, ) - if entity is not None: - entities.append(entity) - - try: - await _entities_from_descriptions( - hass, entities, CIRCUIT_SENSORS, api.circuits, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No circuits found") - - try: - await _entities_from_descriptions( - 
hass, entities, BURNER_SENSORS, api.burners, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No burners found") - - try: - await _entities_from_descriptions( - hass, entities, COMPRESSOR_SENSORS, api.compressors, config_entry - ) - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No compressors found") - - async_add_entities(entities) + ) class ViCareSensor(ViCareEntity, SensorEntity): @@ -667,31 +718,21 @@ class ViCareSensor(ViCareEntity, SensorEntity): entity_description: ViCareSensorEntityDescription def __init__( - self, name, api, device_config, description: ViCareSensorEntityDescription + self, + api, + device_config: PyViCareDeviceConfig, + description: ViCareSensorEntityDescription, ) -> None: """Initialize the sensor.""" - super().__init__(device_config) + super().__init__(device_config, api, description.key) self.entity_description = description - self._attr_name = name - self._api = api - self._device_config = device_config @property - def available(self): + def available(self) -> bool: """Return True if entity is available.""" return self._attr_native_value is not None - @property - def unique_id(self) -> str: - """Return unique ID for this device.""" - tmp_id = ( - f"{self._device_config.getConfig().serial}-{self.entity_description.key}" - ) - if hasattr(self._api, "id"): - return f"{tmp_id}-{self._api.id}" - return tmp_id - - def update(self): + def update(self) -> None: """Update state of sensor.""" try: with suppress(PyViCareNotSupportedFeatureError): diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 2dc1eecd1e4..47ee60b2ea8 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -28,6 +28,277 @@ "unknown": "[%key:common::config_flow::error::unknown%]" } }, + "entity": { + "binary_sensor": { + "circulation_pump": { + "name": "Circulation pump" + }, + "frost_protection": { + "name": "Frost 
protection" + }, + "burner": { + "name": "Burner" + }, + "compressor": { + "name": "Compressor" + }, + "solar_pump": { + "name": "Solar pump" + }, + "domestic_hot_water_charging": { + "name": "DHW charging" + }, + "domestic_hot_water_circulation_pump": { + "name": "DHW circulation pump" + }, + "domestic_hot_water_pump": { + "name": "DHW pump" + } + }, + "button": { + "activate_onetimecharge": { + "name": "Activate one-time charge" + } + }, + "climate": { + "heating": { + "name": "Heating" + } + }, + "number": { + "heating_curve_shift": { + "name": "Heating curve shift" + }, + "heating_curve_slope": { + "name": "Heating curve slope" + }, + "normal_temperature": { + "name": "Normal temperature" + }, + "reduced_temperature": { + "name": "Reduced temperature" + }, + "comfort_temperature": { + "name": "Comfort temperature" + }, + "eco_temperature": { + "name": "Eco temperature" + } + }, + "sensor": { + "outside_temperature": { + "name": "Outside temperature" + }, + "return_temperature": { + "name": "Return temperature" + }, + "boiler_temperature": { + "name": "Boiler temperature" + }, + "boiler_supply_temperature": { + "name": "Boiler supply temperature" + }, + "primary_circuit_supply_temperature": { + "name": "Primary circuit supply temperature" + }, + "primary_circuit_return_temperature": { + "name": "Primary circuit return temperature" + }, + "secondary_circuit_supply_temperature": { + "name": "Secondary circuit supply temperature" + }, + "secondary_circuit_return_temperature": { + "name": "Secondary circuit return temperature" + }, + "hotwater_out_temperature": { + "name": "DHW out temperature" + }, + "hotwater_max_temperature": { + "name": "DHW max temperature" + }, + "hotwater_min_temperature": { + "name": "DHW min temperature" + }, + "hotwater_gas_consumption_today": { + "name": "DHW gas consumption today" + }, + "hotwater_gas_consumption_heating_this_week": { + "name": "DHW gas consumption this week" + }, + "hotwater_gas_consumption_heating_this_month": { + 
"name": "DHW gas consumption this month" + }, + "hotwater_gas_consumption_heating_this_year": { + "name": "DHW gas consumption this year" + }, + "gas_consumption_heating_today": { + "name": "Heating gas consumption today" + }, + "gas_consumption_heating_this_week": { + "name": "Heating gas consumption this week" + }, + "gas_consumption_heating_this_month": { + "name": "Heating gas consumption this month" + }, + "gas_consumption_heating_this_year": { + "name": "Heating gas consumption this year" + }, + "gas_summary_consumption_heating_currentday": { + "name": "Heating gas consumption current day" + }, + "gas_summary_consumption_heating_currentmonth": { + "name": "Heating gas consumption current month" + }, + "gas_summary_consumption_heating_currentyear": { + "name": "Heating gas consumption current year" + }, + "gas_summary_consumption_heating_lastsevendays": { + "name": "Heating gas consumption last seven days" + }, + "hotwater_gas_summary_consumption_heating_currentday": { + "name": "DHW gas consumption current day" + }, + "hotwater_gas_summary_consumption_heating_currentmonth": { + "name": "DHW gas consumption current month" + }, + "hotwater_gas_summary_consumption_heating_currentyear": { + "name": "DHW gas consumption current year" + }, + "hotwater_gas_summary_consumption_heating_lastsevendays": { + "name": "DHW gas consumption last seven days" + }, + "energy_summary_consumption_heating_currentday": { + "name": "Energy consumption of gas heating current day" + }, + "energy_summary_consumption_heating_currentmonth": { + "name": "Energy consumption of gas heating current month" + }, + "energy_summary_consumption_heating_currentyear": { + "name": "Energy consumption of gas heating current year" + }, + "energy_summary_consumption_heating_lastsevendays": { + "name": "Energy consumption of gas heating last seven days" + }, + "energy_dhw_summary_consumption_heating_currentday": { + "name": "Energy consumption of hot water gas heating current day" + }, + 
"energy_dhw_summary_consumption_heating_currentmonth": { + "name": "Energy consumption of hot water gas heating current month" + }, + "energy_dhw_summary_consumption_heating_currentyear": { + "name": "Energy consumption of hot water gas heating current year" + }, + "energy_summary_dhw_consumption_heating_lastsevendays": { + "name": "Energy consumption of hot water gas heating last seven days" + }, + "power_production_current": { + "name": "Power production current" + }, + "power_production_today": { + "name": "Energy production today" + }, + "power_production_this_week": { + "name": "Energy production this week" + }, + "power_production_this_month": { + "name": "Energy production this month" + }, + "power_production_this_year": { + "name": "Energy production this year" + }, + "solar_storage_temperature": { + "name": "Solar storage temperature" + }, + "collector_temperature": { + "name": "Solar collector temperature" + }, + "solar_power_production_today": { + "name": "Solar energy production today" + }, + "solar_power_production_this_week": { + "name": "Solar energy production this week" + }, + "solar_power_production_this_month": { + "name": "Solar energy production this month" + }, + "solar_power_production_this_year": { + "name": "Solar energy production this year" + }, + "power_consumption_today": { + "name": "Energy consumption today" + }, + "power_consumption_this_week": { + "name": "Power consumption this week" + }, + "power_consumption_this_month": { + "name": "Energy consumption this month" + }, + "power_consumption_this_year": { + "name": "Energy consumption this year" + }, + "buffer_top_temperature": { + "name": "Buffer top temperature" + }, + "buffer_main_temperature": { + "name": "Buffer main temperature" + }, + "volumetric_flow": { + "name": "Volumetric flow" + }, + "supply_temperature": { + "name": "Supply temperature" + }, + "burner_starts": { + "name": "Burner starts" + }, + "burner_hours": { + "name": "Burner hours" + }, + "burner_modulation": { + 
"name": "Burner modulation" + }, + "compressor_starts": { + "name": "Compressor starts" + }, + "compressor_hours": { + "name": "Compressor hours" + }, + "compressor_hours_loadclass1": { + "name": "Compressor hours load class 1" + }, + "compressor_hours_loadclass2": { + "name": "Compressor hours load class 2" + }, + "compressor_hours_loadclass3": { + "name": "Compressor hours load class 3" + }, + "compressor_hours_loadclass4": { + "name": "Compressor hours load class 4" + }, + "compressor_hours_loadclass5": { + "name": "Compressor hours load class 5" + }, + "compressor_phase": { + "name": "Compressor phase" + } + }, + "water_heater": { + "domestic_hot_water": { + "name": "Domestic hot water" + } + } + }, + "exceptions": { + "program_unknown": { + "message": "Cannot translate preset {preset} into a valid ViCare program" + }, + "program_not_activated": { + "message": "Unable to activate ViCare program {program}" + }, + "program_not_deactivated": { + "message": "Unable to deactivate ViCare program {program}" + } + }, "services": { "set_vicare_mode": { "name": "Set ViCare mode", diff --git a/homeassistant/components/vicare/utils.py b/homeassistant/components/vicare/utils.py index 19a75c00962..5b3fb38337f 100644 --- a/homeassistant/components/vicare/utils.py +++ b/homeassistant/components/vicare/utils.py @@ -1,6 +1,10 @@ """ViCare helpers functions.""" import logging +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, +) from PyViCare.PyViCareUtils import PyViCareNotSupportedFeatureError from . 
import ViCareRequiredKeysMixin @@ -24,3 +28,30 @@ def is_supported( _LOGGER.debug("Attribute Error %s: %s", name, error) return False return True + + +def get_burners(device: PyViCareDevice) -> list[PyViCareHeatingDeviceComponent]: + """Return the list of burners.""" + try: + return device.burners + except PyViCareNotSupportedFeatureError: + _LOGGER.debug("No burners found") + return [] + + +def get_circuits(device: PyViCareDevice) -> list[PyViCareHeatingDeviceComponent]: + """Return the list of circuits.""" + try: + return device.circuits + except PyViCareNotSupportedFeatureError: + _LOGGER.debug("No circuits found") + return [] + + +def get_compressors(device: PyViCareDevice) -> list[PyViCareHeatingDeviceComponent]: + """Return the list of compressors.""" + try: + return device.compressors + except PyViCareNotSupportedFeatureError: + _LOGGER.debug("No compressors found") + return [] diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index db8a959f4ae..66a90ca065b 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -1,8 +1,13 @@ """Viessmann ViCare water_heater device.""" +from __future__ import annotations + from contextlib import suppress import logging from typing import Any +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import HeatingCircuit as PyViCareHeatingCircuit from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, PyViCareNotSupportedFeatureError, @@ -21,6 +26,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, VICARE_API, VICARE_DEVICE_CONFIG from .entity import ViCareEntity +from .utils import get_circuits _LOGGER = logging.getLogger(__name__) @@ -54,13 +60,20 @@ HA_TO_VICARE_HVAC_DHW = { } -def _get_circuits(vicare_api): - """Return the list of 
circuits.""" - try: - return vicare_api.circuits - except PyViCareNotSupportedFeatureError: - _LOGGER.info("No circuits found") - return [] +def _build_entities( + api: PyViCareDevice, + device_config: PyViCareDeviceConfig, +) -> list[ViCareWater]: + """Create ViCare domestic hot water entities for a device.""" + return [ + ViCareWater( + api, + circuit, + device_config, + "domestic_hot_water", + ) + for circuit in get_circuits(api) + ] async def async_setup_entry( @@ -68,25 +81,17 @@ async def async_setup_entry( config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Set up the ViCare climate platform.""" - entities = [] + """Set up the ViCare water heater platform.""" api = hass.data[DOMAIN][config_entry.entry_id][VICARE_API] - circuits = await hass.async_add_executor_job(_get_circuits, api) + device_config = hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG] - for circuit in circuits: - suffix = "" - if len(circuits) > 1: - suffix = f" {circuit.id}" - - entity = ViCareWater( - f"Water{suffix}", + async_add_entities( + await hass.async_add_executor_job( + _build_entities, api, - circuit, - hass.data[DOMAIN][config_entry.entry_id][VICARE_DEVICE_CONFIG], + device_config, ) - entities.append(entity) - - async_add_entities(entities) + ) class ViCareWater(ViCareEntity, WaterHeaterEntity): @@ -99,15 +104,19 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): _attr_max_temp = VICARE_TEMP_WATER_MAX _attr_operation_list = list(HA_TO_VICARE_HVAC_DHW) - def __init__(self, name, api, circuit, device_config) -> None: + def __init__( + self, + api: PyViCareDevice, + circuit: PyViCareHeatingCircuit, + device_config: PyViCareDeviceConfig, + translation_key: str, + ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(device_config) - self._attr_name = name - self._api = api + super().__init__(device_config, api, circuit.id) self._circuit = circuit self._attributes: dict[str, Any] = {} self._current_mode = None - 
self._attr_unique_id = f"{device_config.getConfig().serial}-{circuit.id}" + self._attr_translation_key = translation_key def update(self) -> None: """Let HA know there has been an update from the ViCare API.""" diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index a2cddcf9a65..ff51f009f3c 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -97,6 +97,9 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): try: try: await self.api.login() + raw_data_devices = await self.api.get_devices_data() + data_sensors = await self.api.get_sensor_data() + await self.api.logout() except exceptions.CannotAuthenticate as err: raise ConfigEntryAuthFailed from err except ( @@ -117,10 +120,8 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): dev_info, utc_point_in_time ), ) - for dev_info in (await self.api.get_devices_data()).values() + for dev_info in (raw_data_devices).values() } - data_sensors = await self.api.get_sensor_data() - await self.api.logout() return UpdateCoordinatorDataType(data_devices, data_sensors) @property diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 2a1814c83d0..20ea4db057e 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vodafone_station", "iot_class": "local_polling", "loggers": ["aiovodafone"], - "requirements": ["aiovodafone==0.4.2"] + "requirements": ["aiovodafone==0.4.3"] } diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 1bda3b1595d..8d9cb444fc9 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ 
b/homeassistant/components/vodafone_station/sensor.py @@ -28,9 +28,9 @@ NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] class VodafoneStationBaseEntityDescription: """Vodafone Station entity base description.""" - value: Callable[ - [Any, Any], Any - ] = lambda coordinator, key: coordinator.data.sensors[key] + value: Callable[[Any, Any], Any] = ( + lambda coordinator, key: coordinator.data.sensors[key] + ) is_suitable: Callable[[dict], bool] = lambda val: True diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 6ea97268684..11f70c631f1 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -5,10 +5,12 @@ import asyncio from collections import deque from collections.abc import AsyncIterable, MutableSequence, Sequence from functools import partial +import io import logging from pathlib import Path import time from typing import TYPE_CHECKING +import wave from voip_utils import ( CallInfo, @@ -37,7 +39,7 @@ from homeassistant.components.assist_pipeline.vad import ( ) from homeassistant.const import __version__ from homeassistant.core import Context, HomeAssistant -from homeassistant.util.ulid import ulid +from homeassistant.util.ulid import ulid_now from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH @@ -111,11 +113,13 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol): valid_protocol_factory=lambda call_info, rtcp_state: make_protocol( hass, devices, call_info, rtcp_state ), - invalid_protocol_factory=lambda call_info, rtcp_state: PreRecordMessageProtocol( - hass, - "not_configured.pcm", - opus_payload_type=call_info.opus_payload_type, - rtcp_state=rtcp_state, + invalid_protocol_factory=( + lambda call_info, rtcp_state: PreRecordMessageProtocol( + hass, + "not_configured.pcm", + opus_payload_type=call_info.opus_payload_type, + rtcp_state=rtcp_state, + ) ), ) self.hass = hass @@ -219,7 +223,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): ) -> None: 
"""Forward audio to pipeline STT and handle TTS.""" if self._session_id is None: - self._session_id = ulid() + self._session_id = ulid_now() # Play listening tone at the start of each cycle if self.listening_tone_enabled: @@ -283,7 +287,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): ), conversation_id=self._conversation_id, device_id=self.voip_device.device_id, - tts_audio_output="raw", + tts_audio_output="wav", ) if self._pipeline_error: @@ -385,11 +389,16 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): self._conversation_id = event.data["intent_output"]["conversation_id"] elif event.type == PipelineEventType.TTS_END: # Send TTS audio to caller over RTP - media_id = event.data["tts_output"]["media_id"] - self.hass.async_create_background_task( - self._send_tts(media_id), - "voip_pipeline_tts", - ) + tts_output = event.data["tts_output"] + if tts_output: + media_id = tts_output["media_id"] + self.hass.async_create_background_task( + self._send_tts(media_id), + "voip_pipeline_tts", + ) + else: + # Empty TTS response + self._tts_done.set() elif event.type == PipelineEventType.ERROR: # Play error tone instead of wait for TTS self._pipeline_error = True @@ -400,11 +409,32 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): if self.transport is None: return - _extension, audio_bytes = await tts.async_get_media_source_audio( + extension, data = await tts.async_get_media_source_audio( self.hass, media_id, ) + if extension != "wav": + raise ValueError(f"Only WAV audio can be streamed, got {extension}") + + with io.BytesIO(data) as wav_io: + with wave.open(wav_io, "rb") as wav_file: + sample_rate = wav_file.getframerate() + sample_width = wav_file.getsampwidth() + sample_channels = wav_file.getnchannels() + + if ( + (sample_rate != 16000) + or (sample_width != 2) + or (sample_channels != 1) + ): + raise ValueError( + "Expected rate/width/channels as 16000/2/1," + " got {sample_rate}/{sample_width}/{sample_channels}}" + ) + + audio_bytes = 
wav_file.readframes(wav_file.getnframes()) + _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) # Time out 1 second after TTS audio should be finished @@ -412,7 +442,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): tts_seconds = tts_samples / RATE async with asyncio.timeout(tts_seconds + self.tts_extra_timeout): - # Assume TTS audio is 16Khz 16-bit mono + # TTS audio is 16Khz 16-bit mono await self._async_send_audio(audio_bytes) except asyncio.TimeoutError as err: _LOGGER.warning("TTS timeout") diff --git a/homeassistant/components/wallbox/number.py b/homeassistant/components/wallbox/number.py index 9694e13103c..b47eb14d58a 100644 --- a/homeassistant/components/wallbox/number.py +++ b/homeassistant/components/wallbox/number.py @@ -35,7 +35,7 @@ def min_charging_current_value(coordinator: WallboxCoordinator) -> float: in BIDIRECTIONAL_MODEL_PREFIXES ): return cast(float, (coordinator.data[CHARGER_MAX_AVAILABLE_POWER_KEY] * -1)) - return 0 + return 6 @dataclass diff --git a/homeassistant/components/waqi/manifest.json b/homeassistant/components/waqi/manifest.json index f5731da2a7e..d742fd72858 100644 --- a/homeassistant/components/waqi/manifest.json +++ b/homeassistant/components/waqi/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/waqi", "iot_class": "cloud_polling", "loggers": ["aiowaqi"], - "requirements": ["aiowaqi==3.0.0"] + "requirements": ["aiowaqi==3.0.1"] } diff --git a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index d04daf2b160..3d9eccd9425 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -135,7 +135,9 @@ SCAN_INTERVAL = timedelta(seconds=30) ROUNDING_PRECISION = 2 -SERVICE_GET_FORECAST: Final = "get_forecast" +LEGACY_SERVICE_GET_FORECAST: Final = "get_forecast" +"""Deprecated: please use SERVICE_GET_FORECASTS.""" +SERVICE_GET_FORECASTS: Final = "get_forecasts" 
_ObservationUpdateCoordinatorT = TypeVar( "_ObservationUpdateCoordinatorT", bound="DataUpdateCoordinator[Any]" @@ -211,7 +213,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: _LOGGER, DOMAIN, hass, SCAN_INTERVAL ) component.async_register_legacy_entity_service( - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, {vol.Required("type"): vol.In(("daily", "hourly", "twice_daily"))}, async_get_forecast_service, required_features=[ @@ -221,6 +223,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ], supports_response=SupportsResponse.ONLY, ) + component.async_register_entity_service( + SERVICE_GET_FORECASTS, + {vol.Required("type"): vol.In(("daily", "hourly", "twice_daily"))}, + async_get_forecasts_service, + required_features=[ + WeatherEntityFeature.FORECAST_DAILY, + WeatherEntityFeature.FORECAST_HOURLY, + WeatherEntityFeature.FORECAST_TWICE_DAILY, + ], + supports_response=SupportsResponse.ONLY, + ) async_setup_ws_api(hass) await component.async_setup(config) return True @@ -1086,6 +1099,32 @@ def raise_unsupported_forecast(entity_id: str, forecast_type: str) -> None: async def async_get_forecast_service( weather: WeatherEntity, service_call: ServiceCall +) -> ServiceResponse: + """Get weather forecast. + + Deprecated: please use async_get_forecasts_service. + """ + _LOGGER.warning( + "Detected use of service 'weather.get_forecast'. " + "This is deprecated and will stop working in Home Assistant 2024.6. 
" + "Use 'weather.get_forecasts' instead which supports multiple entities", + ) + ir.async_create_issue( + weather.hass, + DOMAIN, + "deprecated_service_weather_get_forecast", + breaks_in_ha_version="2024.6.0", + is_fixable=True, + is_persistent=False, + issue_domain=weather.platform.platform_name, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_service_weather_get_forecast", + ) + return await async_get_forecasts_service(weather, service_call) + + +async def async_get_forecasts_service( + weather: WeatherEntity, service_call: ServiceCall ) -> ServiceResponse: """Get weather forecast.""" forecast_type = service_call.data["type"] diff --git a/homeassistant/components/weather/intent.py b/homeassistant/components/weather/intent.py new file mode 100644 index 00000000000..4fd22ceb0a9 --- /dev/null +++ b/homeassistant/components/weather/intent.py @@ -0,0 +1,85 @@ +"""Intents for the weather integration.""" +from __future__ import annotations + +import voluptuous as vol + +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import intent +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent + +from . 
import DOMAIN, WeatherEntity + +INTENT_GET_WEATHER = "HassGetWeather" + + +async def async_setup_intents(hass: HomeAssistant) -> None: + """Set up the weather intents.""" + intent.async_register(hass, GetWeatherIntent()) + + +class GetWeatherIntent(intent.IntentHandler): + """Handle GetWeather intents.""" + + intent_type = INTENT_GET_WEATHER + slot_schema = {vol.Optional("name"): cv.string} + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Handle the intent.""" + hass = intent_obj.hass + slots = self.async_validate_slots(intent_obj.slots) + + weather: WeatherEntity | None = None + weather_state: State | None = None + component: EntityComponent[WeatherEntity] = hass.data[DOMAIN] + entities = list(component.entities) + + if "name" in slots: + # Named weather entity + weather_name = slots["name"]["value"] + + # Find matching weather entity + matching_states = intent.async_match_states( + hass, name=weather_name, domains=[DOMAIN] + ) + for maybe_weather_state in matching_states: + weather = component.get_entity(maybe_weather_state.entity_id) + if weather is not None: + weather_state = maybe_weather_state + break + + if weather is None: + raise intent.IntentHandleError( + f"No weather entity named {weather_name}" + ) + elif entities: + # First weather entity + weather = entities[0] + weather_name = weather.name + weather_state = hass.states.get(weather.entity_id) + + if weather is None: + raise intent.IntentHandleError("No weather entity") + + if weather_state is None: + raise intent.IntentHandleError(f"No state for weather: {weather.name}") + + assert weather is not None + assert weather_state is not None + + # Create response + response = intent_obj.create_response() + response.response_type = intent.IntentResponseType.QUERY_ANSWER + response.async_set_results( + success_results=[ + intent.IntentResponseTarget( + type=intent.IntentResponseTargetType.ENTITY, + name=weather_name, + id=weather.entity_id, + ) + ] + ) + + 
response.async_set_states(matched_states=[weather_state]) + + return response diff --git a/homeassistant/components/weather/services.yaml b/homeassistant/components/weather/services.yaml index b2b71396fab..222dbf596d0 100644 --- a/homeassistant/components/weather/services.yaml +++ b/homeassistant/components/weather/services.yaml @@ -16,3 +16,21 @@ get_forecast: - "hourly" - "twice_daily" translation_key: forecast_type +get_forecasts: + target: + entity: + domain: weather + supported_features: + - weather.WeatherEntityFeature.FORECAST_DAILY + - weather.WeatherEntityFeature.FORECAST_HOURLY + - weather.WeatherEntityFeature.FORECAST_TWICE_DAILY + fields: + type: + required: true + selector: + select: + options: + - "daily" + - "hourly" + - "twice_daily" + translation_key: forecast_type diff --git a/homeassistant/components/weather/strings.json b/homeassistant/components/weather/strings.json index f76e93c66c3..0b712a4de05 100644 --- a/homeassistant/components/weather/strings.json +++ b/homeassistant/components/weather/strings.json @@ -88,13 +88,23 @@ } }, "services": { + "get_forecasts": { + "name": "Get forecasts", + "description": "Get weather forecasts.", + "fields": { + "type": { + "name": "Forecast type", + "description": "Forecast type: daily, hourly or twice daily." + } + } + }, "get_forecast": { "name": "Get forecast", "description": "Get weather forecast.", "fields": { "type": { - "name": "Forecast type", - "description": "Forecast type: daily, hourly or twice daily." 
+ "name": "[%key:component::weather::services::get_forecasts::fields::type::name%]", + "description": "[%key:component::weather::services::get_forecasts::fields::type::description%]" } } } @@ -107,6 +117,17 @@ "deprecated_weather_forecast_no_url": { "title": "[%key:component::weather::issues::deprecated_weather_forecast_url::title%]", "description": "The custom integration `{platform}` implements the `forecast` property or sets `self._attr_forecast` in a subclass of WeatherEntity.\n\nPlease report it to the author of the {platform} integration.\n\nOnce an updated version of `{platform}` is available, install it and restart Home Assistant to fix this issue." + }, + "deprecated_service_weather_get_forecast": { + "title": "Detected use of deprecated service `weather.get_forecast`", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::weather::issues::deprecated_service_weather_get_forecast::title%]", + "description": "Use `weather.get_forecasts` instead which supports multiple entities.\n\nPlease replace this service and adjust your automations and scripts and select **submit** to close this issue." 
+ } + } + } } } } diff --git a/homeassistant/components/webhook/__init__.py b/homeassistant/components/webhook/__init__.py index 5f82ca54283..16f3e5c7ef2 100644 --- a/homeassistant/components/webhook/__init__.py +++ b/homeassistant/components/webhook/__init__.py @@ -17,7 +17,7 @@ from homeassistant.components import websocket_api from homeassistant.components.http.view import HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.network import get_url +from homeassistant.helpers.network import get_url, is_cloud_connection from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass from homeassistant.util import network @@ -145,13 +145,8 @@ async def async_handle_webhook( return Response(status=HTTPStatus.METHOD_NOT_ALLOWED) if webhook["local_only"] in (True, None) and not isinstance(request, MockRequest): - if has_cloud := "cloud" in hass.config.components: - from hass_nabucasa import remote # pylint: disable=import-outside-toplevel - - is_local = True - if has_cloud and remote.is_cloud_request.get(): - is_local = False - else: + is_local = not is_cloud_connection(hass) + if is_local: if TYPE_CHECKING: assert isinstance(request, Request) assert request.remote is not None diff --git a/homeassistant/components/websocket_api/__init__.py b/homeassistant/components/websocket_api/__init__.py index 9c2645aec57..f7086cc81db 100644 --- a/homeassistant/components/websocket_api/__init__.py +++ b/homeassistant/components/websocket_api/__init__.py @@ -17,6 +17,7 @@ from .const import ( # noqa: F401 ERR_INVALID_FORMAT, ERR_NOT_FOUND, ERR_NOT_SUPPORTED, + ERR_SERVICE_VALIDATION_ERROR, ERR_TEMPLATE_ERROR, ERR_TIMEOUT, ERR_UNAUTHORIZED, diff --git a/homeassistant/components/websocket_api/commands.py b/homeassistant/components/websocket_api/commands.py index 18688914e8b..cb90b46e182 100644 --- a/homeassistant/components/websocket_api/commands.py 
+++ b/homeassistant/components/websocket_api/commands.py @@ -778,7 +778,22 @@ async def handle_execute_script( context = connection.context(msg) script_obj = Script(hass, script_config, f"{const.DOMAIN} script", const.DOMAIN) - script_result = await script_obj.async_run(msg.get("variables"), context=context) + try: + script_result = await script_obj.async_run( + msg.get("variables"), context=context + ) + except ServiceValidationError as err: + connection.logger.error(err) + connection.logger.debug("", exc_info=err) + connection.send_error( + msg["id"], + const.ERR_SERVICE_VALIDATION_ERROR, + str(err), + translation_domain=err.translation_domain, + translation_key=err.translation_key, + translation_placeholders=err.translation_placeholders, + ) + return connection.send_result( msg["id"], { diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index 4581b3be773..25b6c90d1ba 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -255,7 +255,10 @@ class ActiveConnection: log_handler = self.logger.error code = const.ERR_UNKNOWN_ERROR - err_message = None + err_message: str | None = None + translation_domain: str | None = None + translation_key: str | None = None + translation_placeholders: dict[str, Any] | None = None if isinstance(err, Unauthorized): code = const.ERR_UNAUTHORIZED @@ -268,6 +271,10 @@ class ActiveConnection: err_message = "Timeout" elif isinstance(err, HomeAssistantError): err_message = str(err) + code = const.ERR_HOME_ASSISTANT_ERROR + translation_domain = err.translation_domain + translation_key = err.translation_key + translation_placeholders = err.translation_placeholders # This if-check matches all other errors but also matches errors which # result in an empty message. 
In that case we will also log the stack @@ -276,7 +283,16 @@ class ActiveConnection: err_message = "Unknown error" log_handler = self.logger.exception - self.send_message(messages.error_message(msg["id"], code, err_message)) + self.send_message( + messages.error_message( + msg["id"], + code, + err_message, + translation_domain=translation_domain, + translation_key=translation_key, + translation_placeholders=translation_placeholders, + ) + ) if code: err_message += f" ({code})" diff --git a/homeassistant/components/whois/sensor.py b/homeassistant/components/whois/sensor.py index beca3540e8e..0116f542a3c 100644 --- a/homeassistant/components/whois/sensor.py +++ b/homeassistant/components/whois/sensor.py @@ -27,20 +27,13 @@ from homeassistant.util import dt as dt_util from .const import ATTR_EXPIRES, ATTR_NAME_SERVERS, ATTR_REGISTRAR, ATTR_UPDATED, DOMAIN -@dataclass -class WhoisSensorEntityDescriptionMixin: - """Mixin for required keys.""" +@dataclass(kw_only=True) +class WhoisSensorEntityDescription(SensorEntityDescription): + """Describes a Whois sensor entity.""" value_fn: Callable[[Domain], datetime | int | str | None] -@dataclass -class WhoisSensorEntityDescription( - SensorEntityDescription, WhoisSensorEntityDescriptionMixin -): - """Describes a Whois sensor entity.""" - - def _days_until_expiration(domain: Domain) -> int | None: """Calculate days left until domain expires.""" if domain.expiration_date is None: diff --git a/homeassistant/components/wirelesstag/__init__.py b/homeassistant/components/wirelesstag/__init__.py index 06fbfa3621e..cfbdb6bdc92 100644 --- a/homeassistant/components/wirelesstag/__init__.py +++ b/homeassistant/components/wirelesstag/__init__.py @@ -5,6 +5,7 @@ from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from wirelesstagpy import WirelessTags from wirelesstagpy.exceptions import WirelessTagsException +from wirelesstagpy.sensortag import SensorTag from homeassistant.components import 
persistent_notification from homeassistant.const import ( @@ -17,6 +18,7 @@ from homeassistant.const import ( UnitOfElectricPotential, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import dispatcher_send from homeassistant.helpers.entity import Entity @@ -126,6 +128,22 @@ class WirelessTagPlatform: self.api.start_monitoring(push_callback) +def async_migrate_unique_id( + hass: HomeAssistant, tag: SensorTag, domain: str, key: str +) -> None: + """Migrate old unique id to new one with use of tag's uuid.""" + registry = er.async_get(hass) + new_unique_id = f"{tag.uuid}_{key}" + + if registry.async_get_entity_id(domain, DOMAIN, new_unique_id): + return + + old_unique_id = f"{tag.tag_id}_{key}" + if entity_id := registry.async_get_entity_id(domain, DOMAIN, old_unique_id): + _LOGGER.debug("Updating unique id for %s %s", key, entity_id) + registry.async_update_entity(entity_id, new_unique_id=new_unique_id) + + def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Wireless Sensor Tag component.""" conf = config[DOMAIN] diff --git a/homeassistant/components/wirelesstag/binary_sensor.py b/homeassistant/components/wirelesstag/binary_sensor.py index 711c2987735..64a1097bcab 100644 --- a/homeassistant/components/wirelesstag/binary_sensor.py +++ b/homeassistant/components/wirelesstag/binary_sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations import voluptuous as vol from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity -from homeassistant.const import CONF_MONITORED_CONDITIONS, STATE_OFF, STATE_ON +from homeassistant.const import CONF_MONITORED_CONDITIONS, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -15,6 
+15,7 @@ from . import ( DOMAIN as WIRELESSTAG_DOMAIN, SIGNAL_BINARY_EVENT_UPDATE, WirelessTagBaseSensor, + async_migrate_unique_id, ) # On means in range, Off means out of range @@ -72,10 +73,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the platform for a WirelessTags.""" @@ -87,9 +88,10 @@ def setup_platform( allowed_sensor_types = tag.supported_binary_events_types for sensor_type in config[CONF_MONITORED_CONDITIONS]: if sensor_type in allowed_sensor_types: + async_migrate_unique_id(hass, tag, Platform.BINARY_SENSOR, sensor_type) sensors.append(WirelessTagBinarySensor(platform, tag, sensor_type)) - add_entities(sensors, True) + async_add_entities(sensors, True) class WirelessTagBinarySensor(WirelessTagBaseSensor, BinarySensorEntity): @@ -100,7 +102,7 @@ class WirelessTagBinarySensor(WirelessTagBaseSensor, BinarySensorEntity): super().__init__(api, tag) self._sensor_type = sensor_type self._name = f"{self._tag.name} {self.event.human_readable_name}" - self._attr_unique_id = f"{self.tag_id}_{self._sensor_type}" + self._attr_unique_id = f"{self._uuid}_{self._sensor_type}" async def async_added_to_hass(self) -> None: """Register callbacks.""" diff --git a/homeassistant/components/wirelesstag/sensor.py b/homeassistant/components/wirelesstag/sensor.py index fd9a7898f92..8ae20031723 100644 --- a/homeassistant/components/wirelesstag/sensor.py +++ b/homeassistant/components/wirelesstag/sensor.py @@ -12,14 +12,19 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import CONF_MONITORED_CONDITIONS +from homeassistant.const import CONF_MONITORED_CONDITIONS, Platform from homeassistant.core import HomeAssistant, callback import 
homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as WIRELESSTAG_DOMAIN, SIGNAL_TAG_UPDATE, WirelessTagBaseSensor +from . import ( + DOMAIN as WIRELESSTAG_DOMAIN, + SIGNAL_TAG_UPDATE, + WirelessTagBaseSensor, + async_migrate_unique_id, +) _LOGGER = logging.getLogger(__name__) @@ -68,10 +73,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the sensor platform.""" @@ -83,9 +88,10 @@ def setup_platform( if key not in tag.allowed_sensor_types: continue description = SENSOR_TYPES[key] + async_migrate_unique_id(hass, tag, Platform.SENSOR, description.key) sensors.append(WirelessTagSensor(platform, tag, description)) - add_entities(sensors, True) + async_add_entities(sensors, True) class WirelessTagSensor(WirelessTagBaseSensor, SensorEntity): @@ -100,7 +106,7 @@ class WirelessTagSensor(WirelessTagBaseSensor, SensorEntity): self._sensor_type = description.key self.entity_description = description self._name = self._tag.name - self._attr_unique_id = f"{self.tag_id}_{self._sensor_type}" + self._attr_unique_id = f"{self._uuid}_{self._sensor_type}" # I want to see entity_id as: # sensor.wirelesstag_bedroom_temperature diff --git a/homeassistant/components/wirelesstag/switch.py b/homeassistant/components/wirelesstag/switch.py index df0f72aca18..7f4008623b1 100644 --- a/homeassistant/components/wirelesstag/switch.py +++ b/homeassistant/components/wirelesstag/switch.py @@ -10,13 +10,17 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.const import 
CONF_MONITORED_CONDITIONS +from homeassistant.const import CONF_MONITORED_CONDITIONS, Platform from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN as WIRELESSTAG_DOMAIN, WirelessTagBaseSensor +from . import ( + DOMAIN as WIRELESSTAG_DOMAIN, + WirelessTagBaseSensor, + async_migrate_unique_id, +) SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( SwitchEntityDescription( @@ -52,10 +56,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up switches for a Wireless Sensor Tags.""" @@ -63,15 +67,17 @@ def setup_platform( tags = platform.load_tags() monitored_conditions = config[CONF_MONITORED_CONDITIONS] - entities = [ - WirelessTagSwitch(platform, tag, description) - for tag in tags.values() - for description in SWITCH_TYPES - if description.key in monitored_conditions - and description.key in tag.allowed_monitoring_types - ] + entities = [] + for tag in tags.values(): + for description in SWITCH_TYPES: + if ( + description.key in monitored_conditions + and description.key in tag.allowed_monitoring_types + ): + async_migrate_unique_id(hass, tag, Platform.SWITCH, description.key) + entities.append(WirelessTagSwitch(platform, tag, description)) - add_entities(entities, True) + async_add_entities(entities, True) class WirelessTagSwitch(WirelessTagBaseSensor, SwitchEntity): @@ -82,7 +88,7 @@ class WirelessTagSwitch(WirelessTagBaseSensor, SwitchEntity): super().__init__(api, tag) self.entity_description = description self._name = f"{self._tag.name} {description.name}" - self._attr_unique_id = f"{self.tag_id}_{description.key}" + 
self._attr_unique_id = f"{self._uuid}_{description.key}" def turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" diff --git a/homeassistant/components/withings/calendar.py b/homeassistant/components/withings/calendar.py index 19572682d1a..132f00936f3 100644 --- a/homeassistant/components/withings/calendar.py +++ b/homeassistant/components/withings/calendar.py @@ -66,7 +66,7 @@ def get_event_name(category: WorkoutCategory) -> str: class WithingsWorkoutCalendarEntity( - CalendarEntity, WithingsEntity[WithingsWorkoutDataUpdateCoordinator] + WithingsEntity[WithingsWorkoutDataUpdateCoordinator], CalendarEntity ): """A calendar entity.""" diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index d43ae7da50c..fe5704d119c 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==1.0.2"] + "requirements": ["aiowithings==2.0.0"] } diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 707059a2930..36ac9ea7d73 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -58,20 +58,13 @@ from .coordinator import ( from .entity import WithingsEntity -@dataclass -class WithingsMeasurementSensorEntityDescriptionMixin: - """Mixin for describing withings data.""" +@dataclass(kw_only=True) +class WithingsMeasurementSensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" measurement_type: MeasurementType -@dataclass -class WithingsMeasurementSensorEntityDescription( - SensorEntityDescription, WithingsMeasurementSensorEntityDescriptionMixin -): - """Immutable class for describing withings data.""" - - MEASUREMENT_SENSORS: dict[ MeasurementType, 
WithingsMeasurementSensorEntityDescription ] = { @@ -243,20 +236,13 @@ MEASUREMENT_SENSORS: dict[ } -@dataclass -class WithingsSleepSensorEntityDescriptionMixin: - """Mixin for describing withings data.""" +@dataclass(kw_only=True) +class WithingsSleepSensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" value_fn: Callable[[SleepSummary], StateType] -@dataclass -class WithingsSleepSensorEntityDescription( - SensorEntityDescription, WithingsSleepSensorEntityDescriptionMixin -): - """Immutable class for describing withings data.""" - - SLEEP_SENSORS = [ WithingsSleepSensorEntityDescription( key="sleep_breathing_disturbances_intensity", @@ -410,20 +396,13 @@ SLEEP_SENSORS = [ ] -@dataclass -class WithingsActivitySensorEntityDescriptionMixin: - """Mixin for describing withings data.""" +@dataclass(kw_only=True) +class WithingsActivitySensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" value_fn: Callable[[Activity], StateType] -@dataclass -class WithingsActivitySensorEntityDescription( - SensorEntityDescription, WithingsActivitySensorEntityDescriptionMixin -): - """Immutable class for describing withings data.""" - - ACTIVITY_SENSORS = [ WithingsActivitySensorEntityDescription( key="activity_steps_today", @@ -445,10 +424,11 @@ ACTIVITY_SENSORS = [ ), WithingsActivitySensorEntityDescription( key="activity_floors_climbed_today", - value_fn=lambda activity: activity.floors_climbed, - translation_key="activity_floors_climbed_today", + value_fn=lambda activity: activity.elevation, + translation_key="activity_elevation_today", icon="mdi:stairs-up", - native_unit_of_measurement="floors", + native_unit_of_measurement=UnitOfLength.METERS, + device_class=SensorDeviceClass.DISTANCE, state_class=SensorStateClass.TOTAL, ), WithingsActivitySensorEntityDescription( @@ -514,20 +494,13 @@ SLEEP_GOAL = "sleep" WEIGHT_GOAL = "weight" -@dataclass -class 
WithingsGoalsSensorEntityDescriptionMixin: - """Mixin for describing withings data.""" +@dataclass(kw_only=True) +class WithingsGoalsSensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" value_fn: Callable[[Goals], StateType] -@dataclass -class WithingsGoalsSensorEntityDescription( - SensorEntityDescription, WithingsGoalsSensorEntityDescriptionMixin -): - """Immutable class for describing withings data.""" - - GOALS_SENSORS: dict[str, WithingsGoalsSensorEntityDescription] = { STEP_GOAL: WithingsGoalsSensorEntityDescription( key="step_goal", @@ -558,20 +531,13 @@ GOALS_SENSORS: dict[str, WithingsGoalsSensorEntityDescription] = { } -@dataclass -class WithingsWorkoutSensorEntityDescriptionMixin: - """Mixin for describing withings data.""" +@dataclass(kw_only=True) +class WithingsWorkoutSensorEntityDescription(SensorEntityDescription): + """Immutable class for describing withings data.""" value_fn: Callable[[Workout], StateType] -@dataclass -class WithingsWorkoutSensorEntityDescription( - SensorEntityDescription, WithingsWorkoutSensorEntityDescriptionMixin -): - """Immutable class for describing withings data.""" - - _WORKOUT_CATEGORY = [ workout_category.name.lower() for workout_category in WorkoutCategory ] @@ -603,10 +569,11 @@ WORKOUT_SENSORS = [ ), WithingsWorkoutSensorEntityDescription( key="workout_floors_climbed", - value_fn=lambda workout: workout.floors_climbed, - translation_key="workout_floors_climbed", + value_fn=lambda workout: workout.elevation, + translation_key="workout_elevation", icon="mdi:stairs-up", - native_unit_of_measurement="floors", + native_unit_of_measurement=UnitOfLength.METERS, + device_class=SensorDeviceClass.DISTANCE, ), WithingsWorkoutSensorEntityDescription( key="workout_intensity", diff --git a/homeassistant/components/withings/strings.json b/homeassistant/components/withings/strings.json index fc24c1f5325..ffbbd9acc2b 100644 --- a/homeassistant/components/withings/strings.json +++ 
b/homeassistant/components/withings/strings.json @@ -158,8 +158,8 @@ "activity_distance_today": { "name": "Distance travelled today" }, - "activity_floors_climbed_today": { - "name": "Floors climbed today" + "activity_elevation_today": { + "name": "Elevation change today" }, "activity_soft_duration_today": { "name": "Soft activity today" @@ -239,8 +239,8 @@ "workout_distance": { "name": "Distance travelled last workout" }, - "workout_floors_climbed": { - "name": "Floors climbed last workout" + "workout_elevation": { + "name": "Elevation change last workout" }, "workout_intensity": { "name": "Last workout intensity" diff --git a/homeassistant/components/wiz/number.py b/homeassistant/components/wiz/number.py index f1212c75f25..76c4b197534 100644 --- a/homeassistant/components/wiz/number.py +++ b/homeassistant/components/wiz/number.py @@ -22,21 +22,14 @@ from .entity import WizEntity from .models import WizData -@dataclass -class WizNumberEntityDescriptionMixin: - """Mixin to describe a WiZ number entity.""" - - value_fn: Callable[[wizlight], int | None] - set_value_fn: Callable[[wizlight, int], Coroutine[None, None, None]] - required_feature: str - - -@dataclass -class WizNumberEntityDescription( - NumberEntityDescription, WizNumberEntityDescriptionMixin -): +@dataclass(kw_only=True) +class WizNumberEntityDescription(NumberEntityDescription): """Class to describe a WiZ number entity.""" + required_feature: str + set_value_fn: Callable[[wizlight, int], Coroutine[None, None, None]] + value_fn: Callable[[wizlight], int | None] + async def _async_set_speed(device: wizlight, speed: int) -> None: await device.set_speed(speed) diff --git a/homeassistant/components/wled/number.py b/homeassistant/components/wled/number.py index 9fb18d3e113..9ab5554a6b7 100644 --- a/homeassistant/components/wled/number.py +++ b/homeassistant/components/wled/number.py @@ -39,18 +39,13 @@ async def async_setup_entry( update_segments() -@dataclass -class WLEDNumberDescriptionMixin: - """Mixin for 
WLED number.""" +@dataclass(kw_only=True) +class WLEDNumberEntityDescription(NumberEntityDescription): + """Class describing WLED number entities.""" value_fn: Callable[[Segment], float | None] -@dataclass -class WLEDNumberEntityDescription(NumberEntityDescription, WLEDNumberDescriptionMixin): - """Class describing WLED number entities.""" - - NUMBERS = [ WLEDNumberEntityDescription( key=ATTR_SPEED, diff --git a/homeassistant/components/wled/sensor.py b/homeassistant/components/wled/sensor.py index 7d1431c093b..64cc3dc2812 100644 --- a/homeassistant/components/wled/sensor.py +++ b/homeassistant/components/wled/sensor.py @@ -31,20 +31,12 @@ from .coordinator import WLEDDataUpdateCoordinator from .models import WLEDEntity -@dataclass -class WLEDSensorEntityDescriptionMixin: - """Mixin for required keys.""" - - value_fn: Callable[[WLEDDevice], datetime | StateType] - - -@dataclass -class WLEDSensorEntityDescription( - SensorEntityDescription, WLEDSensorEntityDescriptionMixin -): +@dataclass(kw_only=True) +class WLEDSensorEntityDescription(SensorEntityDescription): """Describes WLED sensor entity.""" exists_fn: Callable[[WLEDDevice], bool] = lambda _: True + value_fn: Callable[[WLEDDevice], datetime | StateType] SENSORS: tuple[WLEDSensorEntityDescription, ...] = ( diff --git a/homeassistant/components/wolflink/__init__.py b/homeassistant/components/wolflink/__init__.py index 34df0176e29..73f49a2ad09 100644 --- a/homeassistant/components/wolflink/__init__.py +++ b/homeassistant/components/wolflink/__init__.py @@ -51,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: nonlocal refetch_parameters nonlocal parameters await wolf_client.update_session() - if not wolf_client.fetch_system_state_list(device_id, gateway_id): + if not await wolf_client.fetch_system_state_list(device_id, gateway_id): refetch_parameters = True raise UpdateFailed( "Could not fetch values from server because device is Offline." 
diff --git a/homeassistant/components/wolflink/const.py b/homeassistant/components/wolflink/const.py index ac5bbad48dc..59329ee41dd 100644 --- a/homeassistant/components/wolflink/const.py +++ b/homeassistant/components/wolflink/const.py @@ -7,6 +7,7 @@ PARAMETERS = "parameters" DEVICE_ID = "device_id" DEVICE_GATEWAY = "device_gateway" DEVICE_NAME = "device_name" +MANUFACTURER = "WOLF GmbH" STATES = { "Ein": "ein", diff --git a/homeassistant/components/wolflink/sensor.py b/homeassistant/components/wolflink/sensor.py index b4d60011658..2135239b3eb 100644 --- a/homeassistant/components/wolflink/sensor.py +++ b/homeassistant/components/wolflink/sensor.py @@ -15,10 +15,11 @@ from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfPressure, UnitOfTemperature, UnitOfTime from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import COORDINATOR, DEVICE_ID, DOMAIN, PARAMETERS, STATES +from .const import COORDINATOR, DEVICE_ID, DOMAIN, MANUFACTURER, PARAMETERS, STATES async def async_setup_entry( @@ -60,6 +61,11 @@ class WolfLinkSensor(CoordinatorEntity, SensorEntity): self._attr_name = wolf_object.name self._attr_unique_id = f"{device_id}:{wolf_object.parameter_id}" self._state = None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device_id)}, + configuration_url="https://www.wolf-smartset.com/", + manufacturer=MANUFACTURER, + ) @property def native_value(self): diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index 26f44fa1e2d..9cc96db7a57 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -83,6 +83,18 @@ async 
def async_setup_entry( years=year, language=language, ) + if ( + supported_languages := obj_holidays.supported_languages + ) and language == "en": + for lang in supported_languages: + if lang.startswith("en"): + obj_holidays = country_holidays( + country, + subdiv=province, + years=year, + language=lang, + ) + LOGGER.debug("Changing language from %s to %s", language, lang) else: obj_holidays = HolidayBase() diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index 1fbeea0684d..348bb0c2fba 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -18,6 +18,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.selector import ( CountrySelector, CountrySelectorConfig, + LanguageSelector, + LanguageSelectorConfig, NumberSelector, NumberSelectorConfig, NumberSelectorMode, @@ -62,14 +64,14 @@ def add_province_and_language_to_schema( _country = country_holidays(country=country) if country_default_language := (_country.default_language): selectable_languages = _country.supported_languages + new_selectable_languages = [] + for lang in selectable_languages: + new_selectable_languages.append(lang[:2]) language_schema = { vol.Optional( CONF_LANGUAGE, default=country_default_language - ): SelectSelector( - SelectSelectorConfig( - options=list(selectable_languages), - mode=SelectSelectorMode.DROPDOWN, - ) + ): LanguageSelector( + LanguageSelectorConfig(languages=new_selectable_languages) ) } @@ -109,12 +111,25 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None: year: int = dt_util.now().year if country := user_input.get(CONF_COUNTRY): + language = user_input.get(CONF_LANGUAGE) + province = user_input.get(CONF_PROVINCE) obj_holidays = country_holidays( country=country, - subdiv=user_input.get(CONF_PROVINCE), + subdiv=province, years=year, - language=user_input.get(CONF_LANGUAGE), + language=language, ) + if ( + 
supported_languages := obj_holidays.supported_languages + ) and language == "en": + for lang in supported_languages: + if lang.startswith("en"): + obj_holidays = country_holidays( + country, + subdiv=province, + years=year, + language=lang, + ) else: obj_holidays = HolidayBase(years=year) diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 1c9a533d998..c7c993e70d0 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.35"] + "requirements": ["holidays==0.36"] } diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index a3bb28e7a8b..9be019ed724 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -530,9 +530,6 @@ class XiaomiAirPurifier(XiaomiGenericAirPurifier): This method is a coroutine. """ - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -623,9 +620,6 @@ class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -721,9 +715,6 @@ class XiaomiAirFresh(XiaomiGenericAirPurifier): This method is a coroutine. 
""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -809,9 +800,6 @@ class XiaomiAirFreshA1(XiaomiGenericAirPurifier): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan. This method is a coroutine.""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return if await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -958,10 +946,6 @@ class XiaomiFan(XiaomiGenericFan): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return - if preset_mode == ATTR_MODE_NATURE: await self._try_command( "Setting natural fan speed percentage of the miio device failed.", @@ -1034,9 +1018,6 @@ class XiaomiFanP5(XiaomiGenericFan): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, @@ -1093,9 +1074,6 @@ class XiaomiFanMiot(XiaomiGenericFan): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode of the fan.""" - if preset_mode not in self.preset_modes: - _LOGGER.warning("'%s'is not a valid preset mode", preset_mode) - return await self._try_command( "Setting operation mode of the miio device failed.", self._device.set_mode, diff --git a/homeassistant/components/yamaha_musiccast/config_flow.py b/homeassistant/components/yamaha_musiccast/config_flow.py index 94153a47fdc..b64f5aba6b7 100644 --- 
a/homeassistant/components/yamaha_musiccast/config_flow.py +++ b/homeassistant/components/yamaha_musiccast/config_flow.py @@ -95,9 +95,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN): self.upnp_description = discovery_info.ssdp_location # ssdp_location and hostname have been checked in check_yamaha_ssdp so it is safe to ignore type assignment - self.host = urlparse( - discovery_info.ssdp_location - ).hostname # type: ignore[assignment] + self.host = urlparse(discovery_info.ssdp_location).hostname # type: ignore[assignment] await self.async_set_unique_id(self.serial_number) self._abort_if_unique_id_configured( diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 6c44736fa6d..b3bc0c30bf4 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], "quality_scale": "platinum", - "requirements": ["yeelight==0.7.13", "async-upnp-client==0.36.2"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.36.2"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/zamg/manifest.json b/homeassistant/components/zamg/manifest.json index df17672231e..f83e38002b8 100644 --- a/homeassistant/components/zamg/manifest.json +++ b/homeassistant/components/zamg/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/zamg", "iot_class": "cloud_polling", - "requirements": ["zamg==0.3.0"] + "requirements": ["zamg==0.3.3"] } diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 7b47b854bd1..5eb77b0c41c 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - 
"requirements": ["zeroconf==0.125.0"] + "requirements": ["zeroconf==0.127.0"] } diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index 222c7f1d4ef..2046070d6a5 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -9,12 +9,12 @@ import re import voluptuous as vol from zhaquirks import setup as setup_quirks from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH -from zigpy.exceptions import NetworkSettingsInconsistent +from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send @@ -29,6 +29,7 @@ from .core.const import ( CONF_CUSTOM_QUIRKS_PATH, CONF_DEVICE_CONFIG, CONF_ENABLE_QUIRKS, + CONF_FLOW_CONTROL, CONF_RADIO_TYPE, CONF_USB_PATH, CONF_ZIGPY, @@ -36,6 +37,8 @@ from .core.const import ( DOMAIN, PLATFORMS, SIGNAL_ADD_ENTITIES, + STARTUP_FAILURE_DELAY_S, + STARTUP_RETRIES, RadioType, ) from .core.device import get_device_automation_triggers @@ -158,42 +161,67 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b _LOGGER.debug("Trigger cache: %s", zha_data.device_trigger_cache) - zha_gateway = ZHAGateway(hass, zha_data.yaml_config, config_entry) + # Retry setup a few times before giving up to deal with missing serial ports in VMs + for attempt in range(STARTUP_RETRIES): + try: + zha_gateway = await ZHAGateway.async_from_config( + hass=hass, + config=zha_data.yaml_config, + config_entry=config_entry, + ) + 
break + except NetworkSettingsInconsistent as exc: + await warn_on_inconsistent_network_settings( + hass, + config_entry=config_entry, + old_state=exc.old_state, + new_state=exc.new_state, + ) + raise ConfigEntryError( + "Network settings do not match most recent backup" + ) from exc + except TransientConnectionError as exc: + raise ConfigEntryNotReady from exc + except Exception as exc: # pylint: disable=broad-except + _LOGGER.debug( + "Couldn't start coordinator (attempt %s of %s)", + attempt + 1, + STARTUP_RETRIES, + exc_info=exc, + ) - try: - await zha_gateway.async_initialize() - except NetworkSettingsInconsistent as exc: - await warn_on_inconsistent_network_settings( - hass, - config_entry=config_entry, - old_state=exc.old_state, - new_state=exc.new_state, - ) - raise HomeAssistantError( - "Network settings do not match most recent backup" - ) from exc - except Exception: - if RadioType[config_entry.data[CONF_RADIO_TYPE]] == RadioType.ezsp: - try: - await warn_on_wrong_silabs_firmware( - hass, config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] - ) - except AlreadyRunningEZSP as exc: - # If connecting fails but we somehow probe EZSP (e.g. 
stuck in the - # bootloader), reconnect, it should work - raise ConfigEntryNotReady from exc + if attempt < STARTUP_RETRIES - 1: + await asyncio.sleep(STARTUP_FAILURE_DELAY_S) + continue - raise + if RadioType[config_entry.data[CONF_RADIO_TYPE]] == RadioType.ezsp: + try: + # Ignore all exceptions during probing, they shouldn't halt setup + await warn_on_wrong_silabs_firmware( + hass, config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] + ) + except AlreadyRunningEZSP as ezsp_exc: + raise ConfigEntryNotReady from ezsp_exc + + raise repairs.async_delete_blocking_issues(hass) + manufacturer = zha_gateway.state.node_info.manufacturer + model = zha_gateway.state.node_info.model + + if manufacturer is None and model is None: + manufacturer = "Unknown" + model = "Unknown" + device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, - connections={(dr.CONNECTION_ZIGBEE, str(zha_gateway.coordinator_ieee))}, - identifiers={(DOMAIN, str(zha_gateway.coordinator_ieee))}, + connections={(dr.CONNECTION_ZIGBEE, str(zha_gateway.state.node_info.ieee))}, + identifiers={(DOMAIN, str(zha_gateway.state.node_info.ieee))}, name="Zigbee Coordinator", - manufacturer="ZHA", - model=zha_gateway.radio_description, + manufacturer=manufacturer, + model=model, + sw_version=zha_gateway.state.node_info.version, ) websocket_api.async_load_api(hass) @@ -267,5 +295,23 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> config_entry.version = 3 hass.config_entries.async_update_entry(config_entry, data=data) + if config_entry.version == 3: + data = {**config_entry.data} + + if not data[CONF_DEVICE].get(CONF_BAUDRATE): + data[CONF_DEVICE][CONF_BAUDRATE] = { + "deconz": 38400, + "xbee": 57600, + "ezsp": 57600, + "znp": 115200, + "zigate": 115200, + }[data[CONF_RADIO_TYPE]] + + if not data[CONF_DEVICE].get(CONF_FLOW_CONTROL): + data[CONF_DEVICE][CONF_FLOW_CONTROL] = None + + config_entry.version = 4 + hass.config_entries.async_update_entry(config_entry, 
data=data) + _LOGGER.info("Migration to version %s successful", config_entry.version) return True diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 1b6bbee5159..60cf917d9f6 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -27,12 +27,13 @@ from homeassistant.util import dt as dt_util from .core.const import ( CONF_BAUDRATE, - CONF_FLOWCONTROL, + CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN, RadioType, ) from .radio_manager import ( + DEVICE_SCHEMA, HARDWARE_DISCOVERY_SCHEMA, RECOMMENDED_RADIOS, ProbeResult, @@ -42,7 +43,7 @@ from .radio_manager import ( CONF_MANUAL_PATH = "Enter Manually" SUPPORTED_PORT_SETTINGS = ( CONF_BAUDRATE, - CONF_FLOWCONTROL, + CONF_FLOW_CONTROL, ) DECONZ_DOMAIN = "deconz" @@ -160,7 +161,7 @@ class BaseZhaFlow(FlowHandler): return self.async_create_entry( title=self._title, data={ - CONF_DEVICE: device_settings, + CONF_DEVICE: DEVICE_SCHEMA(device_settings), CONF_RADIO_TYPE: self._radio_mgr.radio_type.name, }, ) @@ -281,7 +282,7 @@ class BaseZhaFlow(FlowHandler): for ( param, value, - ) in self._radio_mgr.radio_type.controller.SCHEMA_DEVICE.schema.items(): + ) in DEVICE_SCHEMA.schema.items(): if param not in SUPPORTED_PORT_SETTINGS: continue @@ -488,7 +489,7 @@ class BaseZhaFlow(FlowHandler): class ZhaConfigFlowHandler(BaseZhaFlow, config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow.""" - VERSION = 3 + VERSION = 4 async def _set_unique_id_or_update_path( self, unique_id: str, device_path: str @@ -646,22 +647,17 @@ class ZhaConfigFlowHandler(BaseZhaFlow, config_entries.ConfigFlow, domain=DOMAIN name = discovery_data["name"] radio_type = self._radio_mgr.parse_radio_type(discovery_data["radio_type"]) - - try: - device_settings = radio_type.controller.SCHEMA_DEVICE( - discovery_data["port"] - ) - except vol.Invalid: - return self.async_abort(reason="invalid_hardware_data") + device_settings = discovery_data["port"] 
+ device_path = device_settings[CONF_DEVICE_PATH] await self._set_unique_id_or_update_path( - unique_id=f"{name}_{radio_type.name}_{device_settings[CONF_DEVICE_PATH]}", - device_path=device_settings[CONF_DEVICE_PATH], + unique_id=f"{name}_{radio_type.name}_{device_path}", + device_path=device_path, ) self._title = name self._radio_mgr.radio_type = radio_type - self._radio_mgr.device_path = device_settings[CONF_DEVICE_PATH] + self._radio_mgr.device_path = device_path self._radio_mgr.device_settings = device_settings self.context["title_placeholders"] = {CONF_NAME: name} diff --git a/homeassistant/components/zha/core/cluster_handlers/closures.py b/homeassistant/components/zha/core/cluster_handlers/closures.py index 980a6f88a75..16c7aef89ad 100644 --- a/homeassistant/components/zha/core/cluster_handlers/closures.py +++ b/homeassistant/components/zha/core/cluster_handlers/closures.py @@ -1,6 +1,9 @@ """Closures cluster handlers module for Zigbee Home Automation.""" -from typing import Any +from __future__ import annotations +from typing import TYPE_CHECKING, Any + +import zigpy.zcl from zigpy.zcl.clusters import closures from homeassistant.core import callback @@ -9,6 +12,9 @@ from .. import registries from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED from . 
import AttrReportConfig, ClientClusterHandler, ClusterHandler +if TYPE_CHECKING: + from ..endpoint import Endpoint + @registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(closures.DoorLock.cluster_id) class DoorLockClusterHandler(ClusterHandler): @@ -139,6 +145,14 @@ class WindowCovering(ClusterHandler): ), ) + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize WindowCovering cluster handler.""" + super().__init__(cluster, endpoint) + + if self.cluster.endpoint.model == "lumi.curtain.agl001": + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["window_covering_mode"] = True + async def async_update(self): """Retrieve latest state.""" result = await self.get_attribute_value( diff --git a/homeassistant/components/zha/core/cluster_handlers/general.py b/homeassistant/components/zha/core/cluster_handlers/general.py index 6ca4e420d5f..8bc6902b4ff 100644 --- a/homeassistant/components/zha/core/cluster_handlers/general.py +++ b/homeassistant/components/zha/core/cluster_handlers/general.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Coroutine from typing import TYPE_CHECKING, Any +from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF import zigpy.exceptions import zigpy.types as t import zigpy.zcl @@ -347,26 +348,10 @@ class OnOffClusterHandler(ClusterHandler): super().__init__(cluster, endpoint) self._off_listener = None - if self.cluster.endpoint.model not in ( - "TS011F", - "TS0121", - "TS0001", - "TS0002", - "TS0003", - "TS0004", - ): - return - - try: - self.cluster.find_attribute("backlight_mode") - except KeyError: - return - - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["backlight_mode"] = True - self.ZCL_INIT_ATTRS["power_on_state"] = True - - if self.cluster.endpoint.model == "TS011F": + if endpoint.device.quirk_id == TUYA_PLUG_ONOFF: + self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() + self.ZCL_INIT_ATTRS["backlight_mode"] = True + 
self.ZCL_INIT_ATTRS["power_on_state"] = True self.ZCL_INIT_ATTRS["child_lock"] = True @classmethod diff --git a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py index f2e5dafa099..99c1e954a0e 100644 --- a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py +++ b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py @@ -5,6 +5,7 @@ import logging from typing import TYPE_CHECKING, Any from zhaquirks.inovelli.types import AllLEDEffectType, SingleLEDEffectType +from zhaquirks.quirk_ids import TUYA_PLUG_MANUFACTURER import zigpy.zcl from homeassistant.core import callback @@ -72,25 +73,7 @@ class TuyaClusterHandler(ClusterHandler): def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: """Initialize TuyaClusterHandler.""" super().__init__(cluster, endpoint) - - if self.cluster.endpoint.manufacturer in ( - "_TZE200_7tdtqgwv", - "_TZE200_amp6tsvy", - "_TZE200_oisqyl4o", - "_TZE200_vhy3iakz", - "_TZ3000_uim07oem", - "_TZE200_wfxuhoea", - "_TZE200_tviaymwx", - "_TZE200_g1ib5ldv", - "_TZE200_wunufsil", - "_TZE200_7deq70b8", - "_TZE200_tz32mtza", - "_TZE200_2hf7x9n3", - "_TZE200_aqnazj70", - "_TZE200_1ozguk6x", - "_TZE200_k6jhsr0q", - "_TZE200_9mahtqtg", - ): + if endpoint.device.quirk_id == TUYA_PLUG_MANUFACTURER: self.ZCL_INIT_ATTRS = { "backlight_mode": True, "power_on_state": True, @@ -241,49 +224,94 @@ class InovelliConfigEntityClusterHandler(ClusterHandler): """Inovelli Configuration Entity cluster handler.""" REPORT_CONFIG = () - ZCL_INIT_ATTRS = { - "dimming_speed_up_remote": True, - "dimming_speed_up_local": True, - "ramp_rate_off_to_on_local": True, - "ramp_rate_off_to_on_remote": True, - "dimming_speed_down_remote": True, - "dimming_speed_down_local": True, - "ramp_rate_on_to_off_local": True, - "ramp_rate_on_to_off_remote": True, - "minimum_level": True, - "maximum_level": True, - "invert_switch": 
True, - "auto_off_timer": True, - "default_level_local": True, - "default_level_remote": True, - "state_after_power_restored": True, - "load_level_indicator_timeout": True, - "active_power_reports": True, - "periodic_power_and_energy_reports": True, - "active_energy_reports": True, - "power_type": False, - "switch_type": False, - "increased_non_neutral_output": True, - "button_delay": False, - "smart_bulb_mode": False, - "double_tap_up_enabled": True, - "double_tap_down_enabled": True, - "double_tap_up_level": True, - "double_tap_down_level": True, - "led_color_when_on": True, - "led_color_when_off": True, - "led_intensity_when_on": True, - "led_intensity_when_off": True, - "led_scaling_mode": True, - "aux_switch_scenes": True, - "binding_off_to_on_sync_level": True, - "local_protection": False, - "output_mode": False, - "on_off_led_mode": True, - "firmware_progress_led": True, - "relay_click_in_on_off_mode": True, - "disable_clear_notifications_double_tap": True, - } + + def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: + """Initialize Inovelli cluster handler.""" + super().__init__(cluster, endpoint) + if self.cluster.endpoint.model == "VZM31-SN": + self.ZCL_INIT_ATTRS = { + "dimming_speed_up_remote": True, + "dimming_speed_up_local": True, + "ramp_rate_off_to_on_local": True, + "ramp_rate_off_to_on_remote": True, + "dimming_speed_down_remote": True, + "dimming_speed_down_local": True, + "ramp_rate_on_to_off_local": True, + "ramp_rate_on_to_off_remote": True, + "minimum_level": True, + "maximum_level": True, + "invert_switch": True, + "auto_off_timer": True, + "default_level_local": True, + "default_level_remote": True, + "state_after_power_restored": True, + "load_level_indicator_timeout": True, + "active_power_reports": True, + "periodic_power_and_energy_reports": True, + "active_energy_reports": True, + "power_type": False, + "switch_type": False, + "increased_non_neutral_output": True, + "button_delay": False, + "smart_bulb_mode": 
False, + "double_tap_up_enabled": True, + "double_tap_down_enabled": True, + "double_tap_up_level": True, + "double_tap_down_level": True, + "led_color_when_on": True, + "led_color_when_off": True, + "led_intensity_when_on": True, + "led_intensity_when_off": True, + "led_scaling_mode": True, + "aux_switch_scenes": True, + "binding_off_to_on_sync_level": True, + "local_protection": False, + "output_mode": False, + "on_off_led_mode": True, + "firmware_progress_led": True, + "relay_click_in_on_off_mode": True, + "disable_clear_notifications_double_tap": True, + } + elif self.cluster.endpoint.model == "VZM35-SN": + self.ZCL_INIT_ATTRS = { + "dimming_speed_up_remote": True, + "dimming_speed_up_local": True, + "ramp_rate_off_to_on_local": True, + "ramp_rate_off_to_on_remote": True, + "dimming_speed_down_remote": True, + "dimming_speed_down_local": True, + "ramp_rate_on_to_off_local": True, + "ramp_rate_on_to_off_remote": True, + "minimum_level": True, + "maximum_level": True, + "invert_switch": True, + "auto_off_timer": True, + "default_level_local": True, + "default_level_remote": True, + "state_after_power_restored": True, + "load_level_indicator_timeout": True, + "power_type": False, + "switch_type": False, + "non_neutral_aux_med_gear_learn_value": True, + "non_neutral_aux_low_gear_learn_value": True, + "quick_start_time": False, + "button_delay": False, + "smart_fan_mode": False, + "double_tap_up_enabled": True, + "double_tap_down_enabled": True, + "double_tap_up_level": True, + "double_tap_down_level": True, + "led_color_when_on": True, + "led_color_when_off": True, + "led_intensity_when_on": True, + "led_intensity_when_off": True, + "aux_switch_scenes": True, + "local_protection": False, + "output_mode": False, + "on_off_led_mode": True, + "firmware_progress_led": True, + "smart_fan_led_display_levels": True, + } async def issue_all_led_effect( self, diff --git a/homeassistant/components/zha/core/const.py b/homeassistant/components/zha/core/const.py index 
9874fddc598..f89ed8d9a52 100644 --- a/homeassistant/components/zha/core/const.py +++ b/homeassistant/components/zha/core/const.py @@ -127,6 +127,7 @@ CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" CONF_BAUDRATE = "baudrate" +CONF_FLOW_CONTROL = "flow_control" CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" CONF_DEVICE_CONFIG = "device_config" @@ -136,7 +137,6 @@ CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" CONF_ENABLE_QUIRKS = "enable_quirks" -CONF_FLOWCONTROL = "flow_control" CONF_RADIO_TYPE = "radio_type" CONF_USB_PATH = "usb_path" CONF_USE_THREAD = "use_thread" diff --git a/homeassistant/components/zha/core/device.py b/homeassistant/components/zha/core/device.py index 44acbb172fc..0ce6f47b61e 100644 --- a/homeassistant/components/zha/core/device.py +++ b/homeassistant/components/zha/core/device.py @@ -285,7 +285,7 @@ class ZHADevice(LogMixin): if not self.is_coordinator: return False - return self.ieee == self.gateway.coordinator_ieee + return self.ieee == self.gateway.state.node_info.ieee @property def is_end_device(self) -> bool | None: diff --git a/homeassistant/components/zha/core/gateway.py b/homeassistant/components/zha/core/gateway.py index b4c02d33015..5c038a2d7f8 100644 --- a/homeassistant/components/zha/core/gateway.py +++ b/homeassistant/components/zha/core/gateway.py @@ -11,7 +11,7 @@ import itertools import logging import re import time -from typing import TYPE_CHECKING, Any, NamedTuple +from typing import TYPE_CHECKING, Any, NamedTuple, Self from zigpy.application import ControllerApplication from zigpy.config import ( @@ -24,15 +24,14 @@ from zigpy.config import ( ) import zigpy.device import zigpy.endpoint -from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError import 
zigpy.group +from zigpy.state import State from zigpy.types.named import EUI64 from homeassistant import __path__ as HOMEASSISTANT_PATH from homeassistant.components.system_log import LogEntry, _figure_out_source from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_send @@ -66,8 +65,6 @@ from .const import ( SIGNAL_ADD_ENTITIES, SIGNAL_GROUP_MEMBERSHIP_CHANGE, SIGNAL_REMOVE, - STARTUP_FAILURE_DELAY_S, - STARTUP_RETRIES, UNKNOWN_MANUFACTURER, UNKNOWN_MODEL, ZHA_GW_MSG, @@ -123,10 +120,6 @@ class DevicePairingStatus(Enum): class ZHAGateway: """Gateway that handles events that happen on the ZHA Zigbee network.""" - # -- Set in async_initialize -- - application_controller: ControllerApplication - radio_description: str - def __init__( self, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry ) -> None: @@ -135,7 +128,8 @@ class ZHAGateway: self._config = config self._devices: dict[EUI64, ZHADevice] = {} self._groups: dict[int, ZHAGroup] = {} - self.coordinator_zha_device: ZHADevice | None = None + self.application_controller: ControllerApplication = None + self.coordinator_zha_device: ZHADevice = None # type: ignore[assignment] self._device_registry: collections.defaultdict[ EUI64, list[EntityReference] ] = collections.defaultdict(list) @@ -147,13 +141,11 @@ class ZHAGateway: self._log_relay_handler = LogRelayHandler(hass, self) self.config_entry = config_entry self._unsubs: list[Callable[[], None]] = [] + self.shutting_down = False def get_application_controller_data(self) -> tuple[ControllerApplication, dict]: """Get an uninitialized instance of a zigpy `ControllerApplication`.""" - radio_type = self.config_entry.data[CONF_RADIO_TYPE] - - 
app_controller_cls = RadioType[radio_type].controller - self.radio_description = RadioType[radio_type].description + radio_type = RadioType[self.config_entry.data[CONF_RADIO_TYPE]] app_config = self._config.get(CONF_ZIGPY, {}) database = self._config.get( @@ -170,7 +162,7 @@ class ZHAGateway: # event loop, when a connection to a TCP coordinator fails in a specific way if ( CONF_USE_THREAD not in app_config - and RadioType[radio_type] is RadioType.ezsp + and radio_type is RadioType.ezsp and app_config[CONF_DEVICE][CONF_DEVICE_PATH].startswith("socket://") ): app_config[CONF_USE_THREAD] = False @@ -189,48 +181,40 @@ class ZHAGateway: ): app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 - return app_controller_cls, app_controller_cls.SCHEMA(app_config) + return radio_type.controller, radio_type.controller.SCHEMA(app_config) + + @classmethod + async def async_from_config( + cls, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry + ) -> Self: + """Create an instance of a gateway from config objects.""" + instance = cls(hass, config, config_entry) + await instance.async_initialize() + return instance async def async_initialize(self) -> None: """Initialize controller and connect radio.""" discovery.PROBE.initialize(self.hass) discovery.GROUP_PROBE.initialize(self.hass) + self.shutting_down = False + app_controller_cls, app_config = self.get_application_controller_data() - self.application_controller = await app_controller_cls.new( + app = await app_controller_cls.new( config=app_config, auto_form=False, start_radio=False, ) try: - for attempt in range(STARTUP_RETRIES): - try: - await self.application_controller.startup(auto_form=True) - except TransientConnectionError as exc: - raise ConfigEntryNotReady from exc - except NetworkSettingsInconsistent: - raise - except Exception as exc: # pylint: disable=broad-except - _LOGGER.debug( - "Couldn't start %s coordinator (attempt %s of %s)", - self.radio_description, - attempt + 1, - STARTUP_RETRIES, - 
exc_info=exc, - ) - - if attempt == STARTUP_RETRIES - 1: - raise exc - - await asyncio.sleep(STARTUP_FAILURE_DELAY_S) - else: - break + await app.startup(auto_form=True) except Exception: # Explicitly shut down the controller application on failure - await self.application_controller.shutdown() + await app.shutdown() raise + self.application_controller = app + zha_data = get_zha_data(self.hass) zha_data.gateway = self @@ -244,6 +228,17 @@ class ZHAGateway: self.application_controller.add_listener(self) self.application_controller.groups.add_listener(self) + def connection_lost(self, exc: Exception) -> None: + """Handle connection lost event.""" + if self.shutting_down: + return + + _LOGGER.debug("Connection to the radio was lost: %r", exc) + + self.hass.async_create_task( + self.hass.config_entries.async_reload(self.config_entry.entry_id) + ) + def _find_coordinator_device(self) -> zigpy.device.Device: zigpy_coordinator = self.application_controller.get_device(nwk=0x0000) @@ -258,6 +253,7 @@ class ZHAGateway: @callback def async_load_devices(self) -> None: """Restore ZHA devices from zigpy application state.""" + for zigpy_device in self.application_controller.devices.values(): zha_device = self._async_get_or_create_device(zigpy_device, restored=True) delta_msg = "not known" @@ -280,6 +276,7 @@ class ZHAGateway: @callback def async_load_groups(self) -> None: """Initialize ZHA groups.""" + for group_id in self.application_controller.groups: group = self.application_controller.groups[group_id] zha_group = self._async_get_or_create_group(group) @@ -521,9 +518,9 @@ class ZHAGateway: entity_registry.async_remove(entry.entity_id) @property - def coordinator_ieee(self) -> EUI64: - """Return the active coordinator's IEEE address.""" - return self.application_controller.state.node_info.ieee + def state(self) -> State: + """Return the active coordinator's network state.""" + return self.application_controller.state @property def devices(self) -> dict[EUI64, ZHADevice]: @@ 
-711,6 +708,7 @@ class ZHAGateway: group_id: int | None = None, ) -> ZHAGroup | None: """Create a new Zigpy Zigbee group.""" + # we start with two to fill any gaps from a user removing existing groups if group_id is None: @@ -758,19 +756,13 @@ class ZHAGateway: async def shutdown(self) -> None: """Stop ZHA Controller Application.""" _LOGGER.debug("Shutting down ZHA ControllerApplication") + self.shutting_down = True + for unsubscribe in self._unsubs: unsubscribe() for device in self.devices.values(): device.async_cleanup_handles() - # shutdown is called when the config entry unloads are processed - # there are cases where unloads are processed because of a failure of - # some sort and the application controller may not have been - # created yet - if ( - hasattr(self, "application_controller") - and self.application_controller is not None - ): - await self.application_controller.shutdown() + await self.application_controller.shutdown() def handle_message( self, diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index 05e1da7c570..b92d077907f 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -92,7 +92,7 @@ class BaseZhaEntity(LogMixin, entity.Entity): manufacturer=zha_device_info[ATTR_MANUFACTURER], model=zha_device_info[ATTR_MODEL], name=zha_device_info[ATTR_NAME], - via_device=(DOMAIN, zha_gateway.coordinator_ieee), + via_device=(DOMAIN, zha_gateway.state.node_info.ieee), ) @callback diff --git a/homeassistant/components/zha/fan.py b/homeassistant/components/zha/fan.py index 05bf3469c7b..c6b9a104885 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -13,7 +13,6 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODE, FanEntity, FanEntityFeature, - NotValidPresetModeError, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_UNAVAILABLE, Platform @@ -131,11 +130,6 @@ class 
BaseFan(FanEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode for the fan.""" - if preset_mode not in self.preset_modes: - raise NotValidPresetModeError( - f"The preset_mode {preset_mode} is not a valid preset_mode:" - f" {self.preset_modes}" - ) await self._async_set_fan_mode(self.preset_name_to_mode[preset_mode]) @abstractmethod diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 6a01d550466..d545a331a6d 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -1072,7 +1072,7 @@ class HueLight(Light): @STRICT_MATCH( cluster_handler_names=CLUSTER_HANDLER_ON_OFF, aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers={"Jasco", "Quotra-Vision", "eWeLight", "eWeLink"}, + manufacturers={"Jasco", "Jasco Products", "Quotra-Vision", "eWeLight", "eWeLink"}, ) class ForceOnLight(Light): """Representation of a light which does not respect on/off for move_to_level_with_on_off commands.""" diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index af2c8405e5f..cd53772777a 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,16 +21,16 @@ "universal_silabs_flasher" ], "requirements": [ - "bellows==0.36.8", + "bellows==0.37.1", "pyserial==3.5", "pyserial-asyncio==0.6", - "zha-quirks==0.0.106", - "zigpy-deconz==0.21.1", - "zigpy==0.59.0", - "zigpy-xbee==0.19.0", - "zigpy-zigate==0.11.0", - "zigpy-znp==0.11.6", - "universal-silabs-flasher==0.0.14", + "zha-quirks==0.0.107", + "zigpy-deconz==0.22.0", + "zigpy==0.60.0", + "zigpy-xbee==0.20.0", + "zigpy-zigate==0.12.0", + "zigpy-znp==0.12.0", + "universal-silabs-flasher==0.0.15", "pyserial-asyncio-fast==0.11" ], "usb": [ diff --git a/homeassistant/components/zha/number.py b/homeassistant/components/zha/number.py index ae2f9e0b758..53d79d2d35f 100644 --- 
a/homeassistant/components/zha/number.py +++ b/homeassistant/components/zha/number.py @@ -629,7 +629,7 @@ class InovelliRemoteDimmingUpSpeed(ZHANumberConfigurationEntity): class InovelliButtonDelay(ZHANumberConfigurationEntity): """Inovelli button delay configuration entity.""" - _unique_id_suffix = "dimming_speed_up_local" + _unique_id_suffix = "button_delay" _attr_entity_category = EntityCategory.CONFIG _attr_icon: str = ICONS[3] _attr_native_min_value: float = 0 @@ -778,6 +778,22 @@ class InovelliAutoShutoffTimer(ZHANumberConfigurationEntity): _attr_translation_key: str = "auto_off_timer" +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +# pylint: disable-next=hass-invalid-inheritance # needs fixing +class InovelliQuickStartTime(ZHANumberConfigurationEntity): + """Inovelli fan quick start time configuration entity.""" + + _unique_id_suffix = "quick_start_time" + _attr_entity_category = EntityCategory.CONFIG + _attr_icon: str = ICONS[3] + _attr_native_min_value: float = 0 + _attr_native_max_value: float = 10 + _attribute_name = "quick_start_time" + _attr_translation_key: str = "quick_start_time" + + @CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) # pylint: disable-next=hass-invalid-inheritance # needs fixing class InovelliLoadLevelIndicatorTimeout(ZHANumberConfigurationEntity): diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index d20cf752a91..d3ca03de8d8 100644 --- a/homeassistant/components/zha/radio_manager.py +++ b/homeassistant/components/zha/radio_manager.py @@ -19,6 +19,7 @@ from zigpy.config import ( CONF_DEVICE, CONF_DEVICE_PATH, CONF_NWK_BACKUP_ENABLED, + SCHEMA_DEVICE, ) from zigpy.exceptions import NetworkNotFormed @@ -58,10 +59,21 @@ RETRY_DELAY_S = 1.0 BACKUP_RETRIES = 5 MIGRATION_RETRIES = 100 + +DEVICE_SCHEMA = vol.Schema( + { + vol.Required("path"): str, + vol.Optional("baudrate", default=115200): int, + 
vol.Optional("flow_control", default=None): vol.In( + ["hardware", "software", None] + ), + } +) + HARDWARE_DISCOVERY_SCHEMA = vol.Schema( { vol.Required("name"): str, - vol.Required("port"): dict, + vol.Required("port"): DEVICE_SCHEMA, vol.Required("radio_type"): str, } ) @@ -204,9 +216,7 @@ class ZhaRadioManager: for radio in AUTOPROBE_RADIOS: _LOGGER.debug("Attempting to probe radio type %s", radio) - dev_config = radio.controller.SCHEMA_DEVICE( - {CONF_DEVICE_PATH: self.device_path} - ) + dev_config = SCHEMA_DEVICE({CONF_DEVICE_PATH: self.device_path}) probe_result = await radio.controller.probe(dev_config) if not probe_result: @@ -357,7 +367,7 @@ class ZhaMultiPANMigrationHelper: migration_data["new_discovery_info"]["radio_type"] ) - new_device_settings = new_radio_type.controller.SCHEMA_DEVICE( + new_device_settings = SCHEMA_DEVICE( migration_data["new_discovery_info"]["port"] ) diff --git a/homeassistant/components/zha/select.py b/homeassistant/components/zha/select.py index 46089dd5a28..2ff8b7d36b9 100644 --- a/homeassistant/components/zha/select.py +++ b/homeassistant/components/zha/select.py @@ -6,6 +6,7 @@ import functools import logging from typing import TYPE_CHECKING, Any, Self +from zhaquirks.quirk_ids import TUYA_PLUG_MANUFACTURER, TUYA_PLUG_ONOFF from zigpy import types from zigpy.zcl.clusters.general import OnOff from zigpy.zcl.clusters.security import IasWd @@ -246,29 +247,10 @@ class TuyaPowerOnState(types.enum8): @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - models={"TS011F", "TS0121", "TS0001", "TS0002", "TS0003", "TS0004"}, + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF ) @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_7tdtqgwv", - "_TZE200_amp6tsvy", - "_TZE200_oisqyl4o", - "_TZE200_vhy3iakz", - "_TZ3000_uim07oem", - "_TZE200_wfxuhoea", - "_TZE200_tviaymwx", - "_TZE200_g1ib5ldv", - "_TZE200_wunufsil", - "_TZE200_7deq70b8", - 
"_TZE200_tz32mtza", - "_TZE200_2hf7x9n3", - "_TZE200_aqnazj70", - "_TZE200_1ozguk6x", - "_TZE200_k6jhsr0q", - "_TZE200_9mahtqtg", - }, + cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER ) class TuyaPowerOnStateSelectEntity(ZCLEnumSelectEntity): """Representation of a ZHA power on state select entity.""" @@ -288,8 +270,7 @@ class TuyaBacklightMode(types.enum8): @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - models={"TS011F", "TS0121", "TS0001", "TS0002", "TS0003", "TS0004"}, + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF ) class TuyaBacklightModeSelectEntity(ZCLEnumSelectEntity): """Representation of a ZHA backlight mode select entity.""" @@ -310,25 +291,7 @@ class MoesBacklightMode(types.enum8): @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_7tdtqgwv", - "_TZE200_amp6tsvy", - "_TZE200_oisqyl4o", - "_TZE200_vhy3iakz", - "_TZ3000_uim07oem", - "_TZE200_wfxuhoea", - "_TZE200_tviaymwx", - "_TZE200_g1ib5ldv", - "_TZE200_wunufsil", - "_TZE200_7deq70b8", - "_TZE200_tz32mtza", - "_TZE200_2hf7x9n3", - "_TZE200_aqnazj70", - "_TZE200_1ozguk6x", - "_TZE200_k6jhsr0q", - "_TZE200_9mahtqtg", - }, + cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER ) class MoesBacklightModeSelectEntity(ZCLEnumSelectEntity): """Moes devices have a different backlight mode select options.""" @@ -484,7 +447,7 @@ class InovelliOutputModeEntity(ZCLEnumSelectEntity): class InovelliSwitchType(types.enum8): - """Inovelli output mode.""" + """Inovelli switch mode.""" Single_Pole = 0x00 Three_Way_Dumb = 0x01 @@ -493,7 +456,7 @@ class InovelliSwitchType(types.enum8): @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM31-SN"} ) class InovelliSwitchTypeEntity(ZCLEnumSelectEntity): """Inovelli switch type control.""" @@ -504,6 +467,25 @@ class 
InovelliSwitchTypeEntity(ZCLEnumSelectEntity): _attr_translation_key: str = "switch_type" +class InovelliFanSwitchType(types.enum1): + """Inovelli fan switch mode.""" + + Load_Only = 0x00 + Three_Way_AUX = 0x01 + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliFanSwitchTypeEntity(ZCLEnumSelectEntity): + """Inovelli fan switch type control.""" + + _unique_id_suffix = "switch_type" + _attribute_name = "switch_type" + _enum = InovelliFanSwitchType + _attr_translation_key: str = "switch_type" + + class InovelliLedScalingMode(types.enum1): """Inovelli led mode.""" @@ -523,6 +505,34 @@ class InovelliLedScalingModeEntity(ZCLEnumSelectEntity): _attr_translation_key: str = "led_scaling_mode" +class InovelliFanLedScalingMode(types.enum8): + """Inovelli fan led mode.""" + + VZM31SN = 0x00 + Grade_1 = 0x01 + Grade_2 = 0x02 + Grade_3 = 0x03 + Grade_4 = 0x04 + Grade_5 = 0x05 + Grade_6 = 0x06 + Grade_7 = 0x07 + Grade_8 = 0x08 + Grade_9 = 0x09 + Adaptive = 0x0A + + +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliFanLedScalingModeEntity(ZCLEnumSelectEntity): + """Inovelli fan switch led mode control.""" + + _unique_id_suffix = "smart_fan_led_display_levels" + _attribute_name = "smart_fan_led_display_levels" + _enum = InovelliFanLedScalingMode + _attr_translation_key: str = "smart_fan_led_display_levels" + + class InovelliNonNeutralOutput(types.enum1): """Inovelli non neutral output selection.""" diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 22c2810ad23..18bb3ae4f82 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -721,6 +721,9 @@ }, "away_preset_temperature": { "name": "Away preset temperature" + }, + "quick_start_time": { + "name": "Quick start time" } }, "select": { @@ -766,6 +769,9 @@ "led_scaling_mode": { "name": "Led scaling 
mode" }, + "smart_fan_led_display_levels": { + "name": "Smart fan led display levels" + }, "increased_non_neutral_output": { "name": "Non neutral output" }, @@ -878,6 +884,9 @@ "smart_bulb_mode": { "name": "Smart bulb mode" }, + "smart_fan_mode": { + "name": "Smart fan mode" + }, "double_tap_up_enabled": { "name": "Double tap up enabled" }, diff --git a/homeassistant/components/zha/switch.py b/homeassistant/components/zha/switch.py index e49bc44b822..71c6e9d90ad 100644 --- a/homeassistant/components/zha/switch.py +++ b/homeassistant/components/zha/switch.py @@ -5,6 +5,7 @@ import functools import logging from typing import TYPE_CHECKING, Any, Self +from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF from zigpy.zcl.clusters.general import OnOff from zigpy.zcl.foundation import Status @@ -363,6 +364,17 @@ class InovelliSmartBulbMode(ZHASwitchConfigurationEntity): _attr_translation_key = "smart_bulb_mode" +@CONFIG_DIAGNOSTIC_MATCH( + cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} +) +class InovelliSmartFanMode(ZHASwitchConfigurationEntity): + """Inovelli smart fan mode control.""" + + _unique_id_suffix = "smart_fan_mode" + _attribute_name = "smart_fan_mode" + _attr_translation_key = "smart_fan_mode" + + @CONFIG_DIAGNOSTIC_MATCH( cluster_handler_names=CLUSTER_HANDLER_INOVELLI, ) @@ -488,8 +500,7 @@ class AqaraPetFeederChildLock(ZHASwitchConfigurationEntity): @CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - models={"TS011F"}, + cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF ) class TuyaChildLockSwitch(ZHASwitchConfigurationEntity): """Representation of a child lock configuration entity.""" diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index a917aa44889..9e50b55830c 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -393,7 +393,7 @@ def async_register_api(hass: HomeAssistant) -> None: 
websocket_api.async_register_command(hass, websocket_subscribe_node_status) websocket_api.async_register_command(hass, websocket_node_status) websocket_api.async_register_command(hass, websocket_node_metadata) - websocket_api.async_register_command(hass, websocket_node_comments) + websocket_api.async_register_command(hass, websocket_node_alerts) websocket_api.async_register_command(hass, websocket_add_node) websocket_api.async_register_command(hass, websocket_grant_security_classes) websocket_api.async_register_command(hass, websocket_validate_dsk_and_enter_pin) @@ -616,22 +616,25 @@ async def websocket_node_metadata( @websocket_api.websocket_command( { - vol.Required(TYPE): "zwave_js/node_comments", + vol.Required(TYPE): "zwave_js/node_alerts", vol.Required(DEVICE_ID): str, } ) @websocket_api.async_response @async_get_node -async def websocket_node_comments( +async def websocket_node_alerts( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any], node: Node, ) -> None: - """Get the comments of a Z-Wave JS node.""" + """Get the alerts for a Z-Wave JS node.""" connection.send_result( msg[ID], - {"comments": node.device_config.metadata.comments}, + { + "comments": node.device_config.metadata.comments, + "is_embedded": node.device_config.is_embedded, + }, ) diff --git a/homeassistant/components/zwave_js/binary_sensor.py b/homeassistant/components/zwave_js/binary_sensor.py index ef5cdd1b1d2..acd6780d39f 100644 --- a/homeassistant/components/zwave_js/binary_sensor.py +++ b/homeassistant/components/zwave_js/binary_sensor.py @@ -276,9 +276,7 @@ async def async_setup_entry( if state_key == "0": continue - notification_description: NotificationZWaveJSEntityDescription | None = ( - None - ) + notification_description: NotificationZWaveJSEntityDescription | None = None for description in NOTIFICATION_SENSOR_MAPPINGS: if ( diff --git a/homeassistant/components/zwave_js/const.py b/homeassistant/components/zwave_js/const.py index acc1da4e51a..656620d01dd 100644 
--- a/homeassistant/components/zwave_js/const.py +++ b/homeassistant/components/zwave_js/const.py @@ -99,6 +99,7 @@ SERVICE_REFRESH_VALUE = "refresh_value" SERVICE_RESET_METER = "reset_meter" SERVICE_SET_CONFIG_PARAMETER = "set_config_parameter" SERVICE_SET_LOCK_USERCODE = "set_lock_usercode" +SERVICE_SET_LOCK_CONFIGURATION = "set_lock_configuration" SERVICE_SET_VALUE = "set_value" ATTR_NODES = "nodes" @@ -118,6 +119,13 @@ ATTR_METER_TYPE_NAME = "meter_type_name" # invoke CC API ATTR_METHOD_NAME = "method_name" ATTR_PARAMETERS = "parameters" +# lock set configuration +ATTR_AUTO_RELOCK_TIME = "auto_relock_time" +ATTR_BLOCK_TO_BLOCK = "block_to_block" +ATTR_HOLD_AND_RELEASE_TIME = "hold_and_release_time" +ATTR_LOCK_TIMEOUT = "lock_timeout" +ATTR_OPERATION_TYPE = "operation_type" +ATTR_TWIST_ASSIST = "twist_assist" ADDON_SLUG = "core_zwave_js" diff --git a/homeassistant/components/zwave_js/cover.py b/homeassistant/components/zwave_js/cover.py index 364eafd8caf..27919a17614 100644 --- a/homeassistant/components/zwave_js/cover.py +++ b/homeassistant/components/zwave_js/cover.py @@ -18,6 +18,7 @@ from zwave_js_server.const.command_class.multilevel_switch import ( from zwave_js_server.const.command_class.window_covering import ( NO_POSITION_PROPERTY_KEYS, NO_POSITION_SUFFIX, + WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, SlatStates, ) from zwave_js_server.model.driver import Driver @@ -369,7 +370,7 @@ class ZWaveWindowCovering(CoverPositionMixin, CoverTiltMixin): set_values_func( value, stop_value=self.get_zwave_value( - "levelChangeUp", + WINDOW_COVERING_LEVEL_CHANGE_UP_PROPERTY, value_property_key=value.property_key, ), ) diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 39d8c0e8855..dfe2294e710 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -530,6 +530,68 @@ DISCOVERY_SCHEMAS = [ primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, 
assumed_state=True, ), + # Heatit Z-TRM6 + ZWaveDiscoverySchema( + platform=Platform.CLIMATE, + hint="dynamic_current_temp", + manufacturer_id={0x019B}, + product_id={0x3001}, + product_type={0x0030}, + primary_value=ZWaveValueDiscoverySchema( + command_class={CommandClass.THERMOSTAT_MODE}, + property={THERMOSTAT_MODE_PROPERTY}, + type={ValueType.NUMBER}, + ), + data_template=DynamicCurrentTempClimateDataTemplate( + lookup_table={ + # Floor sensor + "Floor": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=4, + ), + # Internal sensor + "Internal": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=2, + ), + # Internal with limit by floor sensor + "Internal with floor limit": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=2, + ), + # External sensor (connected to device) + "External": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=3, + ), + # External sensor (connected to device) with limit by floor sensor (2x sensors) + "External with floor limit": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=3, + ), + # PWER - Power regulator mode (no sensor used). + # This mode is not supported by the climate entity. + # Heating is set by adjusting parameter 25. + # P25: Set % of time the relay should be active when using PWER mode. + # (30-minute duty cycle) + # Use the air temperature as current temperature in the climate entity + # as we have nothing else. 
+ "Power regulator": ZwaveValueID( + property_=THERMOSTAT_CURRENT_TEMP_PROPERTY, + command_class=CommandClass.SENSOR_MULTILEVEL, + endpoint=2, + ), + }, + dependent_value=ZwaveValueID( + property_=2, command_class=CommandClass.CONFIGURATION, endpoint=0 + ), + ), + ), # Heatit Z-TRM3 ZWaveDiscoverySchema( platform=Platform.CLIMATE, @@ -664,7 +726,14 @@ DISCOVERY_SCHEMAS = [ # locks # Door Lock CC ZWaveDiscoverySchema( - platform=Platform.LOCK, primary_value=DOOR_LOCK_CURRENT_MODE_SCHEMA + platform=Platform.LOCK, + primary_value=DOOR_LOCK_CURRENT_MODE_SCHEMA, + allow_multi=True, + ), + ZWaveDiscoverySchema( + platform=Platform.SELECT, + primary_value=DOOR_LOCK_CURRENT_MODE_SCHEMA, + hint="door_lock", ), # Only discover the Lock CC if the Door Lock CC isn't also present on the node ZWaveDiscoverySchema( diff --git a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index d0630649765..d4247b65c8b 100644 --- a/homeassistant/components/zwave_js/fan.py +++ b/homeassistant/components/zwave_js/fan.py @@ -18,7 +18,6 @@ from homeassistant.components.fan import ( DOMAIN as FAN_DOMAIN, FanEntity, FanEntityFeature, - NotValidPresetModeError, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback @@ -181,11 +180,6 @@ class ValueMappingZwaveFan(ZwaveFan): await self._async_set_value(self._target_value, zwave_value) return - raise NotValidPresetModeError( - f"The preset_mode {preset_mode} is not a valid preset_mode:" - f" {self.preset_modes}" - ) - @property def available(self) -> bool: """Return whether the entity is available.""" diff --git a/homeassistant/components/zwave_js/lock.py b/homeassistant/components/zwave_js/lock.py index 5457916a1e1..59faf7fbbb6 100644 --- a/homeassistant/components/zwave_js/lock.py +++ b/homeassistant/components/zwave_js/lock.py @@ -11,10 +11,12 @@ from zwave_js_server.const.command_class.lock import ( ATTR_USERCODE, LOCK_CMD_CLASS_TO_LOCKED_STATE_MAP, 
LOCK_CMD_CLASS_TO_PROPERTY_MAP, + DoorLockCCConfigurationSetOptions, DoorLockMode, + OperationType, ) from zwave_js_server.exceptions import BaseZwaveJSServerError -from zwave_js_server.util.lock import clear_usercode, set_usercode +from zwave_js_server.util.lock import clear_usercode, set_configuration, set_usercode from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity from homeassistant.config_entries import ConfigEntry @@ -26,10 +28,17 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( + ATTR_AUTO_RELOCK_TIME, + ATTR_BLOCK_TO_BLOCK, + ATTR_HOLD_AND_RELEASE_TIME, + ATTR_LOCK_TIMEOUT, + ATTR_OPERATION_TYPE, + ATTR_TWIST_ASSIST, DATA_CLIENT, DOMAIN, LOGGER, SERVICE_CLEAR_LOCK_USERCODE, + SERVICE_SET_LOCK_CONFIGURATION, SERVICE_SET_LOCK_USERCODE, ) from .discovery import ZwaveDiscoveryInfo @@ -47,6 +56,7 @@ STATE_TO_ZWAVE_MAP: dict[int, dict[str, int | bool]] = { STATE_LOCKED: True, }, } +UNIT16_SCHEMA = vol.All(vol.Coerce(int), vol.Range(min=0, max=65535)) async def async_setup_entry( @@ -92,6 +102,24 @@ async def async_setup_entry( "async_clear_lock_usercode", ) + platform.async_register_entity_service( + SERVICE_SET_LOCK_CONFIGURATION, + { + vol.Required(ATTR_OPERATION_TYPE): vol.All( + cv.string, + vol.Upper, + vol.In(["TIMED", "CONSTANT"]), + lambda x: OperationType[x], + ), + vol.Optional(ATTR_LOCK_TIMEOUT): UNIT16_SCHEMA, + vol.Optional(ATTR_AUTO_RELOCK_TIME): UNIT16_SCHEMA, + vol.Optional(ATTR_HOLD_AND_RELEASE_TIME): UNIT16_SCHEMA, + vol.Optional(ATTR_TWIST_ASSIST): vol.Coerce(bool), + vol.Optional(ATTR_BLOCK_TO_BLOCK): vol.Coerce(bool), + }, + "async_set_lock_configuration", + ) + class ZWaveLock(ZWaveBaseEntity, LockEntity): """Representation of a Z-Wave lock.""" @@ -138,9 +166,10 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): await set_usercode(self.info.node, code_slot, usercode) except BaseZwaveJSServerError as err: 
raise HomeAssistantError( - f"Unable to set lock usercode on code_slot {code_slot}: {err}" + f"Unable to set lock usercode on lock {self.entity_id} code_slot " + f"{code_slot}: {err}" ) from err - LOGGER.debug("User code at slot %s set", code_slot) + LOGGER.debug("User code at slot %s on lock %s set", code_slot, self.entity_id) async def async_clear_lock_usercode(self, code_slot: int) -> None: """Clear the usercode at index X on the lock.""" @@ -148,6 +177,41 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): await clear_usercode(self.info.node, code_slot) except BaseZwaveJSServerError as err: raise HomeAssistantError( - f"Unable to clear lock usercode on code_slot {code_slot}: {err}" + f"Unable to clear lock usercode on lock {self.entity_id} code_slot " + f"{code_slot}: {err}" ) from err - LOGGER.debug("User code at slot %s cleared", code_slot) + LOGGER.debug( + "User code at slot %s on lock %s cleared", code_slot, self.entity_id + ) + + async def async_set_lock_configuration( + self, + operation_type: OperationType, + lock_timeout: int | None = None, + auto_relock_time: int | None = None, + hold_and_release_time: int | None = None, + twist_assist: bool | None = None, + block_to_block: bool | None = None, + ) -> None: + """Set the lock configuration.""" + params: dict[str, Any] = {"operation_type": operation_type} + for attr, val in ( + ("lock_timeout_configuration", lock_timeout), + ("auto_relock_time", auto_relock_time), + ("hold_and_release_time", hold_and_release_time), + ("twist_assist", twist_assist), + ("block_to_block", block_to_block), + ): + if val is not None: + params[attr] = val + configuration = DoorLockCCConfigurationSetOptions(**params) + result = await set_configuration( + self.info.node.endpoints[self.info.primary_value.endpoint or 0], + configuration, + ) + if result is None: + return + msg = f"Result status is {result.status}" + if result.remaining_duration is not None: + msg += f" and remaining duration is {str(result.remaining_duration)}" + 
LOGGER.info("%s after setting lock configuration for %s", msg, self.entity_id) diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index f0c1dcec6b5..f2d32d499c9 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["zwave_js_server"], "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.53.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.54.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/select.py b/homeassistant/components/zwave_js/select.py index 3956004336a..e838949d3e1 100644 --- a/homeassistant/components/zwave_js/select.py +++ b/homeassistant/components/zwave_js/select.py @@ -5,7 +5,8 @@ from typing import cast from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import TARGET_VALUE_PROPERTY, CommandClass -from zwave_js_server.const.command_class.sound_switch import ToneID +from zwave_js_server.const.command_class.lock import TARGET_MODE_PROPERTY +from zwave_js_server.const.command_class.sound_switch import TONE_ID_PROPERTY, ToneID from zwave_js_server.model.driver import Driver from homeassistant.components.select import DOMAIN as SELECT_DOMAIN, SelectEntity @@ -46,6 +47,8 @@ async def async_setup_entry( entities.append( ZWaveConfigParameterSelectEntity(config_entry, driver, info) ) + elif info.platform_hint == "door_lock": + entities.append(ZWaveDoorLockSelectEntity(config_entry, driver, info)) else: entities.append(ZwaveSelectEntity(config_entry, driver, info)) async_add_entities(entities) @@ -95,6 +98,27 @@ class ZwaveSelectEntity(ZWaveBaseEntity, SelectEntity): await self._async_set_value(self.info.primary_value, int(key)) +class ZWaveDoorLockSelectEntity(ZwaveSelectEntity): + """Representation of a Z-Wave door lock CC mode select entity.""" + + def __init__( + 
self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo + ) -> None: + """Initialize a ZWaveDoorLockSelectEntity entity.""" + super().__init__(config_entry, driver, info) + self._target_value = self.get_zwave_value(TARGET_MODE_PROPERTY) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + assert self._target_value is not None + key = next( + key + for key, val in self.info.primary_value.metadata.states.items() + if val == option + ) + await self._async_set_value(self._target_value, int(key)) + + class ZWaveConfigParameterSelectEntity(ZwaveSelectEntity): """Representation of a Z-Wave config parameter select.""" @@ -125,7 +149,7 @@ class ZwaveDefaultToneSelectEntity(ZWaveBaseEntity, SelectEntity): """Initialize a ZwaveDefaultToneSelectEntity entity.""" super().__init__(config_entry, driver, info) self._tones_value = self.get_zwave_value( - "toneId", command_class=CommandClass.SOUND_SWITCH + TONE_ID_PROPERTY, command_class=CommandClass.SOUND_SWITCH ) # Entity class attributes diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index cb8e726bf32..81809e3fbeb 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -29,6 +29,65 @@ set_lock_usercode: selector: text: +set_lock_configuration: + target: + entity: + domain: lock + integration: zwave_js + fields: + operation_type: + required: true + example: timed + selector: + select: + options: + - constant + - timed + lock_timeout: + required: false + example: 1 + selector: + number: + min: 0 + max: 65535 + unit_of_measurement: sec + outside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: + inside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: + auto_relock_time: + required: false + example: 1 + selector: + 
number: + min: 0 + max: 65535 + unit_of_measurement: sec + hold_and_release_time: + required: false + example: 1 + selector: + number: + min: 0 + max: 65535 + unit_of_measurement: sec + twist_assist: + required: false + example: true + selector: + boolean: + block_to_block: + required: false + example: true + selector: + boolean: + set_config_parameter: target: entity: diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 71c6b93e2bd..19a47450080 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -385,6 +385,44 @@ "description": "The Notification Event number as defined in the Z-Wave specs." } } + }, + "set_lock_configuration": { + "name": "Set lock configuration", + "description": "Sets the configuration for a lock.", + "fields": { + "operation_type": { + "name": "Operation Type", + "description": "The operation type of the lock." + }, + "lock_timeout": { + "name": "Lock timeout", + "description": "Seconds until lock mode times out. Should only be used if operation type is `timed`." + }, + "outside_handles_can_open_door_configuration": { + "name": "Outside handles can open door configuration", + "description": "A list of four booleans which indicate which outside handles can open the door." + }, + "inside_handles_can_open_door_configuration": { + "name": "Inside handles can open door configuration", + "description": "A list of four booleans which indicate which inside handles can open the door." + }, + "auto_relock_time": { + "name": "Auto relock time", + "description": "Duration in seconds until lock returns to secure state. Only enforced when operation type is `constant`." + }, + "hold_and_release_time": { + "name": "Hold and release time", + "description": "Duration in seconds the latch stays retracted." + }, + "twist_assist": { + "name": "Twist assist", + "description": "Enable Twist Assist." 
+ }, + "block_to_block": { + "name": "Block to block", + "description": "Enable block-to-block functionality." + } + } } } } diff --git a/homeassistant/components/zwave_js/update.py b/homeassistant/components/zwave_js/update.py index 37cfdc68569..cf743a3e85a 100644 --- a/homeassistant/components/zwave_js/update.py +++ b/homeassistant/components/zwave_js/update.py @@ -344,7 +344,8 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity): is not None and (extra_data := await self.async_get_last_extra_data()) and ( - latest_version_firmware := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict( + latest_version_firmware + := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict( extra_data.as_dict() ).latest_version_firmware ) diff --git a/homeassistant/config.py b/homeassistant/config.py index abe14adb2ef..b4850e372fd 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -4,18 +4,23 @@ from __future__ import annotations from collections import OrderedDict from collections.abc import Callable, Sequence from contextlib import suppress +from dataclasses import dataclass +from enum import StrEnum +from functools import reduce import logging +import operator import os from pathlib import Path import re import shutil from types import ModuleType -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from awesomeversion import AwesomeVersion import voluptuous as vol -from voluptuous.humanize import humanize_error +from voluptuous.humanize import MAX_VALIDATION_ERROR_ITEM_LENGTH +from yaml.error import MarkedYAMLError from . 
import auth from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers @@ -51,7 +56,7 @@ from .const import ( __version__, ) from .core import DOMAIN as CONF_CORE, ConfigSource, HomeAssistant, callback -from .exceptions import HomeAssistantError +from .exceptions import ConfigValidationError, HomeAssistantError from .generated.currencies import HISTORIC_CURRENCIES from .helpers import ( config_per_platform, @@ -69,7 +74,6 @@ from .util.yaml import SECRET_YAML, Secrets, load_yaml _LOGGER = logging.getLogger(__name__) -DATA_PERSISTENT_ERRORS = "bootstrap_persistent_errors" RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml") RE_ASCII = re.compile(r"\033\[[^m]*m") YAML_CONFIG_FILE = "configuration.yaml" @@ -82,11 +86,7 @@ SCRIPT_CONFIG_PATH = "scripts.yaml" SCENE_CONFIG_PATH = "scenes.yaml" LOAD_EXCEPTIONS = (ImportError, FileNotFoundError) -INTEGRATION_LOAD_EXCEPTIONS = ( - IntegrationNotFound, - RequirementsNotFound, - *LOAD_EXCEPTIONS, -) +INTEGRATION_LOAD_EXCEPTIONS = (IntegrationNotFound, RequirementsNotFound) SAFE_MODE_FILENAME = "safe-mode" @@ -118,6 +118,46 @@ tts: """ +class ConfigErrorTranslationKey(StrEnum): + """Config error translation keys for config errors.""" + + # translation keys with a generated config related message text + CONFIG_VALIDATION_ERR = "config_validation_err" + PLATFORM_CONFIG_VALIDATION_ERR = "platform_config_validation_err" + + # translation keys with a general static message text + COMPONENT_IMPORT_ERR = "component_import_err" + CONFIG_PLATFORM_IMPORT_ERR = "config_platform_import_err" + CONFIG_VALIDATOR_UNKNOWN_ERR = "config_validator_unknown_err" + CONFIG_SCHEMA_UNKNOWN_ERR = "config_schema_unknown_err" + PLATFORM_VALIDATOR_UNKNOWN_ERR = "platform_validator_unknown_err" + PLATFORM_COMPONENT_LOAD_ERR = "platform_component_load_err" + PLATFORM_COMPONENT_LOAD_EXC = "platform_component_load_exc" + PLATFORM_SCHEMA_VALIDATOR_ERR = "platform_schema_validator_err" + + # translation key in case multiple errors 
occurred + INTEGRATION_CONFIG_ERROR = "integration_config_error" + + +@dataclass +class ConfigExceptionInfo: + """Configuration exception info class.""" + + exception: Exception + translation_key: ConfigErrorTranslationKey + platform_name: str + config: ConfigType + integration_link: str | None + + +@dataclass +class IntegrationConfigInfo: + """Configuration for an integration and exception information.""" + + config: ConfigType | None + exception_info_list: list[ConfigExceptionInfo] + + def _no_duplicate_auth_provider( configs: Sequence[dict[str, Any]] ) -> Sequence[dict[str, Any]]: @@ -395,12 +435,24 @@ async def async_hass_config_yaml(hass: HomeAssistant) -> dict: secrets = Secrets(Path(hass.config.config_dir)) # Not using async_add_executor_job because this is an internal method. - config = await hass.loop.run_in_executor( - None, - load_yaml_config_file, - hass.config.path(YAML_CONFIG_FILE), - secrets, - ) + try: + config = await hass.loop.run_in_executor( + None, + load_yaml_config_file, + hass.config.path(YAML_CONFIG_FILE), + secrets, + ) + except HomeAssistantError as exc: + if not (base_exc := exc.__cause__) or not isinstance(base_exc, MarkedYAMLError): + raise + + # Rewrite path to offending YAML file to be relative the hass config dir + if base_exc.context_mark and base_exc.context_mark.name: + base_exc.context_mark.name = _relpath(hass, base_exc.context_mark.name) + if base_exc.problem_mark and base_exc.problem_mark.name: + base_exc.problem_mark.name = _relpath(hass, base_exc.problem_mark.name) + raise + core_config = config.get(CONF_CORE, {}) await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {})) return config @@ -488,60 +540,222 @@ def process_ha_config_upgrade(hass: HomeAssistant) -> None: @callback -def async_log_exception( - ex: Exception, +def async_log_schema_error( + exc: vol.Invalid, domain: str, config: dict, hass: HomeAssistant, link: str | None = None, ) -> None: - """Log an error for configuration validation. 
- - This method must be run in the event loop. - """ - if hass is not None: - async_notify_setup_error(hass, domain, link) - message, is_friendly = _format_config_error(ex, domain, config, link) - _LOGGER.error(message, exc_info=not is_friendly and ex) + """Log a schema validation error.""" + message = format_schema_error(hass, exc, domain, config, link) + _LOGGER.error(message) @callback -def _format_config_error( - ex: Exception, domain: str, config: dict, link: str | None = None -) -> tuple[str, bool]: - """Generate log exception for configuration validation. +def async_log_config_validator_error( + exc: vol.Invalid | HomeAssistantError, + domain: str, + config: dict, + hass: HomeAssistant, + link: str | None = None, +) -> None: + """Log an error from a custom config validator.""" + if isinstance(exc, vol.Invalid): + async_log_schema_error(exc, domain, config, hass, link) + return - This method must be run in the event loop. + message = format_homeassistant_error(hass, exc, domain, config, link) + _LOGGER.error(message, exc_info=exc) + + +def _get_annotation(item: Any) -> tuple[str, int | str] | None: + if not hasattr(item, "__config_file__"): + return None + + return (getattr(item, "__config_file__"), getattr(item, "__line__", "?")) + + +def _get_by_path(data: dict | list, items: list[str | int]) -> Any: + """Access a nested object in root by item sequence. + + Returns None in case of error. """ - is_friendly = False - message = f"Invalid config for [{domain}]: " - if isinstance(ex, vol.Invalid): - if "extra keys not allowed" in ex.error_message: - path = "->".join(str(m) for m in ex.path) - message += ( - f"[{ex.path[-1]}] is an invalid option for [{domain}]. " - f"Check: {domain}->{path}." - ) - else: - message += f"{humanize_error(config, ex)}." 
- is_friendly = True - else: - message += str(ex) or repr(ex) - try: - domain_config = config.get(domain, config) - except AttributeError: - domain_config = config + return reduce(operator.getitem, items, data) # type: ignore[arg-type] + except (KeyError, IndexError, TypeError): + return None - message += ( - f" (See {getattr(domain_config, '__config_file__', '?')}, " - f"line {getattr(domain_config, '__line__', '?')})." + +def find_annotation( + config: dict | list, path: list[str | int] +) -> tuple[str, int | str] | None: + """Find file/line annotation for a node in config pointed to by path. + + If the node pointed to is a dict or list, prefer the annotation for the key in + the key/value pair defining the dict or list. + If the node is not annotated, try the parent node. + """ + + def find_annotation_for_key( + item: dict, path: list[str | int], tail: str | int + ) -> tuple[str, int | str] | None: + for key in item: + if key == tail: + if annotation := _get_annotation(key): + return annotation + break + return None + + def find_annotation_rec( + config: dict | list, path: list[str | int], tail: str | int | None + ) -> tuple[str, int | str] | None: + item = _get_by_path(config, path) + if isinstance(item, dict) and tail is not None: + if tail_annotation := find_annotation_for_key(item, path, tail): + return tail_annotation + + if ( + isinstance(item, (dict, list)) + and path + and ( + key_annotation := find_annotation_for_key( + _get_by_path(config, path[:-1]), path[:-1], path[-1] + ) + ) + ): + return key_annotation + + if annotation := _get_annotation(item): + return annotation + + if not path: + return None + + tail = path.pop() + if annotation := find_annotation_rec(config, path, tail): + return annotation + return _get_annotation(item) + + return find_annotation_rec(config, list(path), None) + + +def _relpath(hass: HomeAssistant, path: str) -> str: + """Return path relative to the Home Assistant config dir.""" + return os.path.relpath(path, 
hass.config.config_dir) + + +def stringify_invalid( + hass: HomeAssistant, + exc: vol.Invalid, + domain: str, + config: dict, + link: str | None, + max_sub_error_length: int, +) -> str: + """Stringify voluptuous.Invalid. + + This is an alternative to the custom __str__ implemented in + voluptuous.error.Invalid. The modifications are: + - Format the path delimited by -> instead of @data[] + - Prefix with domain, file and line of the error + - Suffix with a link to the documentation + - Give a more user friendly output for unknown options + - Give a more user friendly output for missing options + """ + message_prefix = f"Invalid config for '{domain}'" + if domain != CONF_CORE and link: + message_suffix = f", please check the docs at {link}" + else: + message_suffix = "" + if annotation := find_annotation(config, exc.path): + message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" + path = "->".join(str(m) for m in exc.path) + if exc.error_message == "extra keys not allowed": + return ( + f"{message_prefix}: '{exc.path[-1]}' is an invalid option for '{domain}', " + f"check: {path}{message_suffix}" + ) + if exc.error_message == "required key not provided": + return ( + f"{message_prefix}: required key '{exc.path[-1]}' not provided" + f"{message_suffix}" + ) + # This function is an alternative to the stringification done by + # vol.Invalid.__str__, so we need to call Exception.__str__ here + # instead of str(exc) + output = Exception.__str__(exc) + if error_type := exc.error_type: + output += " for " + error_type + offending_item_summary = repr(_get_by_path(config, exc.path)) + if len(offending_item_summary) > max_sub_error_length: + offending_item_summary = ( + f"{offending_item_summary[: max_sub_error_length - 3]}..." 
+ ) + return ( + f"{message_prefix}: {output} '{path}', got {offending_item_summary}" + f"{message_suffix}" ) - if domain != CONF_CORE and link: - message += f" Please check the docs at {link}" - return message, is_friendly +def humanize_error( + hass: HomeAssistant, + validation_error: vol.Invalid, + domain: str, + config: dict, + link: str | None, + max_sub_error_length: int = MAX_VALIDATION_ERROR_ITEM_LENGTH, +) -> str: + """Provide a more helpful + complete validation error message. + + This is a modified version of voluptuous.error.Invalid.__str__, + the modifications make some minor changes to the formatting. + """ + if isinstance(validation_error, vol.MultipleInvalid): + return "\n".join( + sorted( + humanize_error( + hass, sub_error, domain, config, link, max_sub_error_length + ) + for sub_error in validation_error.errors + ) + ) + return stringify_invalid( + hass, validation_error, domain, config, link, max_sub_error_length + ) + + +@callback +def format_homeassistant_error( + hass: HomeAssistant, + exc: HomeAssistantError, + domain: str, + config: dict, + link: str | None = None, +) -> str: + """Format HomeAssistantError thrown by a custom config validator.""" + message_prefix = f"Invalid config for '{domain}'" + # HomeAssistantError raised by custom config validator has no path to the + # offending configuration key, use the domain key as path instead. 
+ if annotation := find_annotation(config, [domain]): + message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" + message = f"{message_prefix}: {str(exc) or repr(exc)}" + if domain != CONF_CORE and link: + message += f", please check the docs at {link}" + + return message + + +@callback +def format_schema_error( + hass: HomeAssistant, + exc: vol.Invalid, + domain: str, + config: dict, + link: str | None = None, +) -> str: + """Format configuration validation error.""" + return humanize_error(hass, exc, domain, config, link) async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None: @@ -663,17 +877,15 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non hac.units = get_unit_system(config[CONF_UNIT_SYSTEM]) -def _log_pkg_error(package: str, component: str, config: dict, message: str) -> None: +def _log_pkg_error( + hass: HomeAssistant, package: str, component: str, config: dict, message: str +) -> None: """Log an error while merging packages.""" - message = f"Package {package} setup failed. Integration {component} {message}" + message_prefix = f"Setup of package '{package}'" + if annotation := find_annotation(config, [CONF_CORE, CONF_PACKAGES, package]): + message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" - pack_config = config[CONF_CORE][CONF_PACKAGES].get(package, config) - message += ( - f" (See {getattr(pack_config, '__config_file__', '?')}:" - f"{getattr(pack_config, '__line__', '?')})." - ) - - _LOGGER.error(message) + _LOGGER.error("%s failed: %s", message_prefix, message) def _identify_config_schema(module: ComponentProtocol) -> str | None: @@ -750,7 +962,9 @@ async def merge_packages_config( hass: HomeAssistant, config: dict, packages: dict[str, Any], - _log_pkg_error: Callable = _log_pkg_error, + _log_pkg_error: Callable[ + [HomeAssistant, str, str, dict, str], None + ] = _log_pkg_error, ) -> dict: """Merge packages into the top-level configuration. 
Mutate config.""" PACKAGES_CONFIG_SCHEMA(packages) @@ -767,8 +981,17 @@ async def merge_packages_config( hass, domain ) component = integration.get_component() - except INTEGRATION_LOAD_EXCEPTIONS as ex: - _log_pkg_error(pack_name, comp_name, config, str(ex)) + except LOAD_EXCEPTIONS as exc: + _log_pkg_error( + hass, + pack_name, + comp_name, + config, + f"Integration {comp_name} caused error: {str(exc)}", + ) + continue + except INTEGRATION_LOAD_EXCEPTIONS as exc: + _log_pkg_error(hass, pack_name, comp_name, config, str(exc)) continue try: @@ -802,7 +1025,11 @@ async def merge_packages_config( if not isinstance(comp_conf, dict): _log_pkg_error( - pack_name, comp_name, config, "cannot be merged. Expected a dict." + hass, + pack_name, + comp_name, + config, + f"integration '{comp_name}' cannot be merged, expected a dict", ) continue @@ -811,37 +1038,217 @@ async def merge_packages_config( if not isinstance(config[comp_name], dict): _log_pkg_error( + hass, pack_name, comp_name, config, - "cannot be merged. Dict expected in main config.", + ( + f"integration '{comp_name}' cannot be merged, dict expected in " + "main config" + ), ) continue duplicate_key = _recursive_merge(conf=config[comp_name], package=comp_conf) if duplicate_key: _log_pkg_error( - pack_name, comp_name, config, f"has duplicate key '{duplicate_key}'" + hass, + pack_name, + comp_name, + config, + f"integration '{comp_name}' has duplicate key '{duplicate_key}'", ) return config -async def async_process_component_config( # noqa: C901 - hass: HomeAssistant, config: ConfigType, integration: Integration -) -> ConfigType | None: - """Check component configuration and return processed configuration. 
+@callback +def _get_log_message_and_stack_print_pref( + hass: HomeAssistant, domain: str, platform_exception: ConfigExceptionInfo +) -> tuple[str | None, bool, dict[str, str]]: + """Get message to log and print stack trace preference.""" + exception = platform_exception.exception + platform_name = platform_exception.platform_name + platform_config = platform_exception.config + link = platform_exception.integration_link - Returns None on error. + placeholders: dict[str, str] = {"domain": domain, "error": str(exception)} + + log_message_mapping: dict[ConfigErrorTranslationKey, tuple[str, bool]] = { + ConfigErrorTranslationKey.COMPONENT_IMPORT_ERR: ( + f"Unable to import {domain}: {exception}", + False, + ), + ConfigErrorTranslationKey.CONFIG_PLATFORM_IMPORT_ERR: ( + f"Error importing config platform {domain}: {exception}", + False, + ), + ConfigErrorTranslationKey.CONFIG_VALIDATOR_UNKNOWN_ERR: ( + f"Unknown error calling {domain} config validator", + True, + ), + ConfigErrorTranslationKey.CONFIG_SCHEMA_UNKNOWN_ERR: ( + f"Unknown error calling {domain} CONFIG_SCHEMA", + True, + ), + ConfigErrorTranslationKey.PLATFORM_VALIDATOR_UNKNOWN_ERR: ( + f"Unknown error validating {platform_name} platform config with {domain} " + "component platform schema", + True, + ), + ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_ERR: ( + f"Platform error: {domain} - {exception}", + False, + ), + ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_EXC: ( + f"Platform error: {domain} - {exception}", + True, + ), + ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR: ( + f"Unknown error validating config for {platform_name} platform " + f"for {domain} component with PLATFORM_SCHEMA", + True, + ), + } + log_message_show_stack_trace = log_message_mapping.get( + platform_exception.translation_key + ) + if log_message_show_stack_trace is None: + # If no pre defined log_message is set, we generate an enriched error + # message, so we can notify about it during setup + show_stack_trace = 
False + if isinstance(exception, vol.Invalid): + log_message = format_schema_error( + hass, exception, platform_name, platform_config, link + ) + if annotation := find_annotation(platform_config, exception.path): + placeholders["config_file"], line = annotation + placeholders["line"] = str(line) + else: + if TYPE_CHECKING: + assert isinstance(exception, HomeAssistantError) + log_message = format_homeassistant_error( + hass, exception, platform_name, platform_config, link + ) + if annotation := find_annotation(platform_config, [platform_name]): + placeholders["config_file"], line = annotation + placeholders["line"] = str(line) + show_stack_trace = True + return (log_message, show_stack_trace, placeholders) + + assert isinstance(log_message_show_stack_trace, tuple) + + return (*log_message_show_stack_trace, placeholders) + + +async def async_process_component_and_handle_errors( + hass: HomeAssistant, + config: ConfigType, + integration: Integration, + raise_on_failure: bool = False, +) -> ConfigType | None: + """Process and component configuration and handle errors. + + In case of errors: + - Print the error messages to the log. + - Raise a ConfigValidationError if raise_on_failure is set. + + Returns the integration config or `None`. + """ + integration_config_info = await async_process_component_config( + hass, config, integration + ) + return async_handle_component_errors( + hass, integration_config_info, integration, raise_on_failure + ) + + +@callback +def async_handle_component_errors( + hass: HomeAssistant, + integration_config_info: IntegrationConfigInfo, + integration: Integration, + raise_on_failure: bool = False, +) -> ConfigType | None: + """Handle component configuration errors from async_process_component_config. + + In case of errors: + - Print the error messages to the log. + - Raise a ConfigValidationError if raise_on_failure is set. + + Returns the integration config or `None`. 
+ """ + + if not (config_exception_info := integration_config_info.exception_info_list): + return integration_config_info.config + + platform_exception: ConfigExceptionInfo + domain = integration.domain + placeholders: dict[str, str] + for platform_exception in config_exception_info: + exception = platform_exception.exception + ( + log_message, + show_stack_trace, + placeholders, + ) = _get_log_message_and_stack_print_pref(hass, domain, platform_exception) + _LOGGER.error( + log_message, + exc_info=exception if show_stack_trace else None, + ) + + if not raise_on_failure: + return integration_config_info.config + + if len(config_exception_info) == 1: + translation_key = platform_exception.translation_key + else: + translation_key = ConfigErrorTranslationKey.INTEGRATION_CONFIG_ERROR + errors = str(len(config_exception_info)) + log_message = ( + f"Failed to process component config for integration {domain} " + f"due to multiple errors ({errors}), check the logs for more information." + ) + placeholders = { + "domain": domain, + "errors": errors, + } + raise ConfigValidationError( + str(log_message), + [platform_exception.exception for platform_exception in config_exception_info], + translation_domain="homeassistant", + translation_key=translation_key, + translation_placeholders=placeholders, + ) + + +async def async_process_component_config( # noqa: C901 + hass: HomeAssistant, + config: ConfigType, + integration: Integration, +) -> IntegrationConfigInfo: + """Check component configuration. + + Returns processed configuration and exception information. This method must be run in the event loop. 
""" domain = integration.domain + integration_docs = integration.documentation + config_exceptions: list[ConfigExceptionInfo] = [] + try: component = integration.get_component() - except LOAD_EXCEPTIONS as ex: - _LOGGER.error("Unable to import %s: %s", domain, ex) - return None + except LOAD_EXCEPTIONS as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.COMPONENT_IMPORT_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) # Check if the integration has a custom config validator config_validator = None @@ -852,58 +1259,101 @@ async def async_process_component_config( # noqa: C901 # If the config platform contains bad imports, make sure # that still fails. if err.name != f"{integration.pkg_path}.config": - _LOGGER.error("Error importing config platform %s: %s", domain, err) - return None + exc_info = ConfigExceptionInfo( + err, + ConfigErrorTranslationKey.CONFIG_PLATFORM_IMPORT_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) if config_validator is not None and hasattr( config_validator, "async_validate_config" ): try: - return ( # type: ignore[no-any-return] - await config_validator.async_validate_config(hass, config) + return IntegrationConfigInfo( + await config_validator.async_validate_config(hass, config), [] ) - except (vol.Invalid, HomeAssistantError) as ex: - async_log_exception(ex, domain, config, hass, integration.documentation) - return None - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unknown error calling %s config validator", domain) - return None + except (vol.Invalid, HomeAssistantError) as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.CONFIG_VALIDATION_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) + 
except Exception as exc: # pylint: disable=broad-except + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.CONFIG_VALIDATOR_UNKNOWN_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) # No custom config validator, proceed with schema validation if hasattr(component, "CONFIG_SCHEMA"): try: - return component.CONFIG_SCHEMA(config) # type: ignore[no-any-return] - except vol.Invalid as ex: - async_log_exception(ex, domain, config, hass, integration.documentation) - return None - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unknown error calling %s CONFIG_SCHEMA", domain) - return None + return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) + except vol.Invalid as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.CONFIG_VALIDATION_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) + except Exception as exc: # pylint: disable=broad-except + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.CONFIG_SCHEMA_UNKNOWN_ERR, + domain, + config, + integration_docs, + ) + config_exceptions.append(exc_info) + return IntegrationConfigInfo(None, config_exceptions) component_platform_schema = getattr( component, "PLATFORM_SCHEMA_BASE", getattr(component, "PLATFORM_SCHEMA", None) ) if component_platform_schema is None: - return config + return IntegrationConfigInfo(config, []) - platforms = [] + platforms: list[ConfigType] = [] for p_name, p_config in config_per_platform(config, domain): # Validate component specific platform schema + platform_name = f"{domain}.{p_name}" try: p_validated = component_platform_schema(p_config) - except vol.Invalid as ex: - async_log_exception(ex, domain, p_config, hass, integration.documentation) - continue - except Exception: # pylint: disable=broad-except - _LOGGER.exception( - ( - 
"Unknown error validating %s platform config with %s component" - " platform schema" - ), - p_name, + except vol.Invalid as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_CONFIG_VALIDATION_ERR, domain, + p_config, + integration_docs, ) + config_exceptions.append(exc_info) + continue + except Exception as exc: # pylint: disable=broad-except + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR, + str(p_name), + config, + integration_docs, + ) + config_exceptions.append(exc_info) continue # Not all platform components follow same pattern for platforms @@ -915,38 +1365,53 @@ async def async_process_component_config( # noqa: C901 try: p_integration = await async_get_integration_with_requirements(hass, p_name) - except (RequirementsNotFound, IntegrationNotFound) as ex: - _LOGGER.error("Platform error: %s - %s", domain, ex) + except (RequirementsNotFound, IntegrationNotFound) as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_ERR, + platform_name, + p_config, + integration_docs, + ) + config_exceptions.append(exc_info) continue try: platform = p_integration.get_platform(domain) - except LOAD_EXCEPTIONS: - _LOGGER.exception("Platform error: %s", domain) + except LOAD_EXCEPTIONS as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_EXC, + platform_name, + p_config, + integration_docs, + ) + config_exceptions.append(exc_info) continue # Validate platform specific schema if hasattr(platform, "PLATFORM_SCHEMA"): try: p_validated = platform.PLATFORM_SCHEMA(p_config) - except vol.Invalid as ex: - async_log_exception( - ex, - f"{domain}.{p_name}", + except vol.Invalid as exc: + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_CONFIG_VALIDATION_ERR, + platform_name, p_config, - hass, p_integration.documentation, ) + config_exceptions.append(exc_info) continue - except Exception: 
# pylint: disable=broad-except - _LOGGER.exception( - ( - "Unknown error validating config for %s platform for %s" - " component with PLATFORM_SCHEMA" - ), + except Exception as exc: # pylint: disable=broad-except + exc_info = ConfigExceptionInfo( + exc, + ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR, p_name, - domain, + p_config, + p_integration.documentation, ) + config_exceptions.append(exc_info) continue platforms.append(p_validated) @@ -956,7 +1421,7 @@ async def async_process_component_config( # noqa: C901 config = config_without_domain(config, domain) config[domain] = platforms - return config + return IntegrationConfigInfo(config, config_exceptions) @callback @@ -981,36 +1446,6 @@ async def async_check_ha_config_file(hass: HomeAssistant) -> str | None: return res.error_str -@callback -def async_notify_setup_error( - hass: HomeAssistant, component: str, display_link: str | None = None -) -> None: - """Print a persistent notification. - - This method must be run in the event loop. - """ - # pylint: disable-next=import-outside-toplevel - from .components import persistent_notification - - if (errors := hass.data.get(DATA_PERSISTENT_ERRORS)) is None: - errors = hass.data[DATA_PERSISTENT_ERRORS] = {} - - errors[component] = errors.get(component) or display_link - - message = "The following integrations and platforms could not be set up:\n\n" - - for name, link in errors.items(): - show_logs = f"[Show logs](/config/logs?filter={name})" - part = f"[{name}]({link})" if link else name - message += f" - {part} ({show_logs})\n" - - message += "\nPlease check your config and [logs](/config/logs)." - - persistent_notification.async_create( - hass, message, "Invalid config", "invalid_config" - ) - - def safe_mode_enabled(config_dir: str) -> bool: """Return if safe mode is enabled. 
diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 2b8f1ec4065..756b2def581 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -406,8 +406,8 @@ class ConfigEntry: "%s.async_setup_entry did not return boolean", integration.domain ) result = False - except ConfigEntryError as ex: - error_reason = str(ex) or "Unknown fatal config entry error" + except ConfigEntryError as exc: + error_reason = str(exc) or "Unknown fatal config entry error" _LOGGER.exception( "Error setting up entry %s for %s: %s", self.title, @@ -416,8 +416,8 @@ class ConfigEntry: ) await self._async_process_on_unload(hass) result = False - except ConfigEntryAuthFailed as ex: - message = str(ex) + except ConfigEntryAuthFailed as exc: + message = str(exc) auth_base_message = "could not authenticate" error_reason = message or auth_base_message auth_message = ( @@ -432,13 +432,13 @@ class ConfigEntry: await self._async_process_on_unload(hass) self.async_start_reauth(hass) result = False - except ConfigEntryNotReady as ex: - self._async_set_state(hass, ConfigEntryState.SETUP_RETRY, str(ex) or None) + except ConfigEntryNotReady as exc: + self._async_set_state(hass, ConfigEntryState.SETUP_RETRY, str(exc) or None) wait_time = 2 ** min(self._tries, 4) * 5 + ( randint(RANDOM_MICROSECOND_MIN, RANDOM_MICROSECOND_MAX) / 1000000 ) self._tries += 1 - message = str(ex) + message = str(exc) ready_message = f"ready yet: {message}" if message else "ready yet" _LOGGER.debug( ( @@ -565,13 +565,13 @@ class ConfigEntry: await self._async_process_on_unload(hass) return result - except Exception as ex: # pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except _LOGGER.exception( "Error unloading entry %s for %s", self.title, integration.domain ) if integration.domain == self.domain: self._async_set_state( - hass, ConfigEntryState.FAILED_UNLOAD, str(ex) or "Unknown error" + hass, ConfigEntryState.FAILED_UNLOAD, str(exc) or 
"Unknown error" ) return False diff --git a/homeassistant/const.py b/homeassistant/const.py index c6655ba3900..8da1c251b4e 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -5,8 +5,8 @@ from enum import StrEnum from typing import Final APPLICATION_NAME: Final = "HomeAssistant" -MAJOR_VERSION: Final = 2023 -MINOR_VERSION: Final = 12 +MAJOR_VERSION: Final = 2024 +MINOR_VERSION: Final = 1 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/homeassistant/core.py b/homeassistant/core.py index d174786d968..7d9d8d19b49 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -80,7 +80,6 @@ from .exceptions import ( ServiceNotFound, Unauthorized, ) -from .helpers.aiohttp_compat import restore_original_aiohttp_cancel_behavior from .helpers.json import json_dumps from .util import dt as dt_util, location from .util.async_ import ( @@ -91,7 +90,7 @@ from .util.async_ import ( from .util.json import JsonObjectType from .util.read_only_dict import ReadOnlyDict from .util.timeout import TimeoutManager -from .util.ulid import ulid, ulid_at_time +from .util.ulid import ulid_at_time, ulid_now from .util.unit_system import ( _CONF_UNIT_SYSTEM_IMPERIAL, _CONF_UNIT_SYSTEM_US_CUSTOMARY, @@ -113,7 +112,6 @@ STAGE_2_SHUTDOWN_TIMEOUT = 60 STAGE_3_SHUTDOWN_TIMEOUT = 30 block_async_io.enable() -restore_original_aiohttp_cancel_behavior() _T = TypeVar("_T") _R = TypeVar("_R") @@ -874,8 +872,10 @@ class HomeAssistant: _LOGGER.exception( "Task %s could not be canceled during stage 3 shutdown", task ) - except Exception as ex: # pylint: disable=broad-except - _LOGGER.exception("Task %s error during stage 3 shutdown: %s", task, ex) + except Exception as exc: # pylint: disable=broad-except + _LOGGER.exception( + "Task %s error during stage 3 shutdown: %s", task, exc + ) # Prevent run_callback_threadsafe from scheduling any additional # callbacks in the event 
loop as callbacks created on the futures @@ -930,7 +930,7 @@ class Context: id: str | None = None, # pylint: disable=redefined-builtin ) -> None: """Init the context.""" - self.id = id or ulid() + self.id = id or ulid_now() self.user_id = user_id self.parent_id = parent_id self.origin_event: Event | None = None diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index 262b0e338ff..8d5e2bbde95 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -26,6 +26,31 @@ class HomeAssistantError(Exception): self.translation_placeholders = translation_placeholders +class ConfigValidationError(HomeAssistantError, ExceptionGroup[Exception]): + """A validation exception occurred when validating the configuration.""" + + def __init__( + self, + message: str, + exceptions: list[Exception], + translation_domain: str | None = None, + translation_key: str | None = None, + translation_placeholders: dict[str, str] | None = None, + ) -> None: + """Initialize exception.""" + super().__init__( + *(message, exceptions), + translation_domain=translation_domain, + translation_key=translation_key, + translation_placeholders=translation_placeholders, + ) + self._message = message + + def __str__(self) -> str: + """Return exception message string.""" + return self._message + + class ServiceValidationError(HomeAssistantError): """A validation exception occurred when calling a service.""" diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 1b67718c221..6def4498055 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -96,6 +96,7 @@ FLOWS = { "deconz", "deluge", "denonavr", + "devialet", "devolo_home_control", "devolo_home_network", "dexcom", @@ -142,12 +143,14 @@ FLOWS = { "evil_genius_labs", "ezviz", "faa_delays", + "fastdotcom", "fibaro", "filesize", "fireservicerota", "fitbit", "fivem", "fjaraskupan", + "flexit_bacnet", "flick_electric", "flipr", "flo", @@ 
-245,7 +248,6 @@ FLOWS = { "kmtronic", "knx", "kodi", - "komfovent", "konnected", "kostal_plenticore", "kraken", @@ -262,6 +264,7 @@ FLOWS = { "lidarr", "life360", "lifx", + "linear_garage_door", "litejet", "litterrobot", "livisi", @@ -346,6 +349,7 @@ FLOWS = { "opower", "oralb", "otbr", + "ourgroceries", "overkiz", "ovo_energy", "owntracks", @@ -353,9 +357,11 @@ FLOWS = { "panasonic_viera", "peco", "pegel_online", + "permobil", "philips_js", "pi_hole", "picnic", + "ping", "plaato", "plex", "plugwise", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 63c7cd84303..6d04d7602f2 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -109,6 +109,10 @@ DHCP: list[dict[str, str | bool]] = [ "domain": "broadlink", "macaddress": "EC0BAE*", }, + { + "domain": "broadlink", + "macaddress": "780F77*", + }, { "domain": "dlink", "hostname": "dsp-w215", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 0cf08240186..a59d6387c8c 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1073,6 +1073,12 @@ } } }, + "devialet": { + "name": "Devialet", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "device_sun_light_trigger": { "name": "Presence-based Lights", "integration_type": "hub", @@ -1558,12 +1564,6 @@ "eq3": { "name": "eQ-3", "integrations": { - "eq3btsmart": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "eQ-3 Bluetooth Smart Thermostats" - }, "maxcube": { "integration_type": "hub", "config_flow": false, @@ -1662,7 +1662,7 @@ "fastdotcom": { "name": "Fast.com", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "cloud_polling" }, "feedreader": { @@ -1724,7 +1724,7 @@ }, "fints": { "name": "FinTS", - "integration_type": "hub", + "integration_type": "service", "config_flow": false, 
"iot_class": "cloud_polling" }, @@ -1772,9 +1772,20 @@ }, "flexit": { "name": "Flexit", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" + "integrations": { + "flexit": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Flexit" + }, + "flexit_bacnet": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling", + "name": "Flexit Nordic (BACnet)" + } + } }, "flexom": { "name": "Bouygues Flexom", @@ -2887,12 +2898,6 @@ "config_flow": true, "iot_class": "local_push" }, - "komfovent": { - "name": "Komfovent", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_polling" - }, "konnected": { "name": "Konnected.io", "integration_type": "hub", @@ -3059,6 +3064,12 @@ "config_flow": false, "iot_class": "assumed_state" }, + "linear_garage_door": { + "name": "Linear Garage Door", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "linksys_smart": { "name": "Linksys Smart Wi-Fi", "integration_type": "hub", @@ -4152,11 +4163,17 @@ "config_flow": false, "iot_class": "local_polling" }, + "ourgroceries": { + "name": "OurGroceries", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "overkiz": { "name": "Overkiz", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "local_polling" }, "ovo_energy": { "name": "OVO Energy", @@ -4242,6 +4259,12 @@ "integration_type": "virtual", "supported_by": "opower" }, + "permobil": { + "name": "MyPermobil", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "pge": { "name": "Pacific Gas & Electric (PG&E)", "integration_type": "virtual", @@ -4297,7 +4320,7 @@ "ping": { "name": "Ping (ICMP)", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "pioneer": { @@ -6104,7 +6127,7 @@ "iot_class": "cloud_polling" }, 
"universal": { - "name": "Universal Media Player", + "name": "Universal media player", "integration_type": "hub", "config_flow": false, "iot_class": "calculated" diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 92405d13f90..036c50ea445 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -52,7 +52,7 @@ HOMEKIT = { "always_discover": True, "domain": "hive", }, - "Healty Home Coach": { + "Healthy Home Coach": { "always_discover": True, "domain": "netatmo", }, @@ -116,6 +116,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Neon": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Nightvision": { "always_discover": True, "domain": "lifx", @@ -128,6 +132,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX String": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Tile": { "always_discover": True, "domain": "lifx", @@ -361,6 +369,11 @@ ZEROCONF = { "domain": "forked_daapd", }, ], + "_devialet-http._tcp.local.": [ + { + "domain": "devialet", + }, + ], "_dkapi._tcp.local.": [ { "domain": "daikin", @@ -513,6 +526,12 @@ ZEROCONF = { "name": "gateway*", }, ], + "_kizboxdev._tcp.local.": [ + { + "domain": "overkiz", + "name": "gateway*", + }, + ], "_lookin._tcp.local.": [ { "domain": "lookin", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index b8d810d899b..74527a5922f 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -58,19 +58,6 @@ MAXIMUM_CONNECTIONS = 4096 MAXIMUM_CONNECTIONS_PER_HOST = 100 -# Overwrite base aiohttp _wait implementation -# Homeassistant has a custom shutdown wait logic. 
-async def _noop_wait(*args: Any, **kwargs: Any) -> None: - """Do nothing.""" - return - - -# TODO: Remove version check with aiohttp 3.9.0 # pylint: disable=fixme -if sys.version_info >= (3, 12): - # pylint: disable-next=protected-access - web.BaseSite._wait = _noop_wait # type: ignore[method-assign] - - class HassClientResponse(aiohttp.ClientResponse): """aiohttp.ClientResponse with a json method that uses json_loads by default.""" @@ -311,7 +298,7 @@ def _async_get_connector( return connectors[connector_key] if verify_ssl: - ssl_context: bool | SSLContext = ssl_util.get_default_context() + ssl_context: SSLContext = ssl_util.get_default_context() else: ssl_context = ssl_util.get_default_no_verify_context() diff --git a/homeassistant/helpers/aiohttp_compat.py b/homeassistant/helpers/aiohttp_compat.py deleted file mode 100644 index 6e281b659fe..00000000000 --- a/homeassistant/helpers/aiohttp_compat.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Helper to restore old aiohttp behavior.""" -from __future__ import annotations - -from aiohttp import web, web_protocol, web_server - - -class CancelOnDisconnectRequestHandler(web_protocol.RequestHandler): - """Request handler that cancels tasks on disconnect.""" - - def connection_lost(self, exc: BaseException | None) -> None: - """Handle connection lost.""" - task_handler = self._task_handler - super().connection_lost(exc) - if task_handler is not None: - task_handler.cancel("aiohttp connection lost") - - -def restore_original_aiohttp_cancel_behavior() -> None: - """Patch aiohttp to restore cancel behavior. 
- - Remove this once aiohttp 3.9 is released as we can use - https://github.com/aio-libs/aiohttp/pull/7128 - """ - web_protocol.RequestHandler = CancelOnDisconnectRequestHandler # type: ignore[misc] - web_server.RequestHandler = CancelOnDisconnectRequestHandler # type: ignore[misc] - - -def enable_compression(response: web.Response) -> None: - """Enable compression on the response.""" - # - # Set _zlib_executor_size in the constructor once support for - # aiohttp < 3.9.0 is dropped - # - # We want large zlib payloads to be compressed in the executor - # to avoid blocking the event loop. - # - # 32KiB was chosen based on testing in production. - # aiohttp will generate a warning for payloads larger than 1MiB - # - response._zlib_executor_size = 32768 # pylint: disable=protected-access - response.enable_compression() diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 441381f9994..23707949dcd 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -15,9 +15,10 @@ from homeassistant.config import ( # type: ignore[attr-defined] CONF_PACKAGES, CORE_CONFIG_SCHEMA, YAML_CONFIG_FILE, - _format_config_error, config_per_platform, extract_domain_configs, + format_homeassistant_error, + format_schema_error, load_yaml_config_file, merge_packages_config, ) @@ -92,21 +93,33 @@ async def async_check_ha_config_file( # noqa: C901 async_clear_install_history(hass) def _pack_error( - package: str, component: str, config: ConfigType, message: str + hass: HomeAssistant, + package: str, + component: str, + config: ConfigType, + message: str, ) -> None: - """Handle errors from packages: _log_pkg_error.""" - message = f"Package {package} setup failed. 
Component {component} {message}" + """Handle errors from packages.""" + message = f"Setup of package '{package}' failed: {message}" domain = f"homeassistant.packages.{package}.{component}" pack_config = core_config[CONF_PACKAGES].get(package, config) result.add_warning(message, domain, pack_config) - def _comp_error(ex: Exception, domain: str, component_config: ConfigType) -> None: - """Handle errors from components: async_log_exception.""" - message = _format_config_error(ex, domain, component_config)[0] - if domain in frontend_dependencies: - result.add_error(message, domain, component_config) + def _comp_error( + ex: vol.Invalid | HomeAssistantError, + domain: str, + component_config: ConfigType, + config_to_attach: ConfigType, + ) -> None: + """Handle errors from components.""" + if isinstance(ex, vol.Invalid): + message = format_schema_error(hass, ex, domain, component_config) else: - result.add_warning(message, domain, component_config) + message = format_homeassistant_error(hass, ex, domain, component_config) + if domain in frontend_dependencies: + result.add_error(message, domain, config_to_attach) + else: + result.add_warning(message, domain, config_to_attach) async def _get_integration( hass: HomeAssistant, domain: str @@ -149,7 +162,9 @@ async def async_check_ha_config_file( # noqa: C901 result[CONF_CORE] = core_config except vol.Invalid as err: result.add_error( - _format_config_error(err, CONF_CORE, core_config)[0], CONF_CORE, core_config + format_schema_error(hass, err, CONF_CORE, core_config), + CONF_CORE, + core_config, ) core_config = {} @@ -201,7 +216,7 @@ async def async_check_ha_config_file( # noqa: C901 )[domain] continue except (vol.Invalid, HomeAssistantError) as ex: - _comp_error(ex, domain, config) + _comp_error(ex, domain, config, config[domain]) continue except Exception as err: # pylint: disable=broad-except logging.getLogger(__name__).exception( @@ -217,12 +232,12 @@ async def async_check_ha_config_file( # noqa: C901 config_schema = 
getattr(component, "CONFIG_SCHEMA", None) if config_schema is not None: try: - config = config_schema(config) + validated_config = config_schema(config) # Don't fail if the validator removed the domain from the config - if domain in config: - result[domain] = config[domain] + if domain in validated_config: + result[domain] = validated_config[domain] except vol.Invalid as ex: - _comp_error(ex, domain, config) + _comp_error(ex, domain, config, config[domain]) continue component_platform_schema = getattr( @@ -240,7 +255,7 @@ async def async_check_ha_config_file( # noqa: C901 try: p_validated = component_platform_schema(p_config) except vol.Invalid as ex: - _comp_error(ex, domain, p_config) + _comp_error(ex, domain, p_config, p_config) continue # Not all platform components follow same pattern for platforms @@ -276,7 +291,7 @@ async def async_check_ha_config_file( # noqa: C901 try: p_validated = platform_schema(p_validated) except vol.Invalid as ex: - _comp_error(ex, f"{domain}.{p_name}", p_config) + _comp_error(ex, f"{domain}.{p_name}", p_config, p_config) continue platforms.append(p_validated) diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index ddd46759259..775d0934c36 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -355,7 +355,7 @@ class EntityComponent(Generic[_EntityT]): integration = await async_get_integration(self.hass, self.domain) - processed_conf = await conf_util.async_process_component_config( + processed_conf = await conf_util.async_process_component_and_handle_errors( self.hass, conf, integration ) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 388c00bd177..2fc82567739 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -813,7 +813,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: dict[str, Any] | 
vol.Schema, + schema: dict[str | vol.Marker, Any] | vol.Schema, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 648e0e5bd09..1de7a6c6a43 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -251,7 +251,9 @@ def async_track_state_change( return async_track_state_change_event(hass, entity_ids, state_change_listener) return hass.bus.async_listen( - EVENT_STATE_CHANGED, state_change_dispatcher, event_filter=state_change_filter # type: ignore[arg-type] + EVENT_STATE_CHANGED, + state_change_dispatcher, # type: ignore[arg-type] + event_filter=state_change_filter, # type: ignore[arg-type] ) @@ -761,7 +763,8 @@ class _TrackStateChangeFiltered: @callback def _setup_all_listener(self) -> None: self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen( - EVENT_STATE_CHANGED, self._action # type: ignore[arg-type] + EVENT_STATE_CHANGED, + self._action, # type: ignore[arg-type] ) @@ -1335,7 +1338,8 @@ def async_track_same_state( if entity_ids == MATCH_ALL: async_remove_state_for_cancel = hass.bus.async_listen( - EVENT_STATE_CHANGED, state_for_cancel_listener # type: ignore[arg-type] + EVENT_STATE_CHANGED, + state_for_cancel_listener, # type: ignore[arg-type] ) else: async_remove_state_for_cancel = async_track_state_change_event( diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index 12accf2725a..58ca191feb0 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -299,3 +299,14 @@ def _get_cloud_url(hass: HomeAssistant, require_current_request: bool = False) - return normalize_url(str(cloud_url)) raise NoURLAvailableError + + +def is_cloud_connection(hass: HomeAssistant) -> bool: + """Return True if the current connection is a nabucasa cloud connection.""" + + if "cloud" not in hass.config.components: + return 
False + + from hass_nabucasa import remote # pylint: disable=import-outside-toplevel + + return remote.is_cloud_request.get() diff --git a/homeassistant/helpers/reload.py b/homeassistant/helpers/reload.py index 75529476dd2..42ebc2d0869 100644 --- a/homeassistant/helpers/reload.py +++ b/homeassistant/helpers/reload.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import Iterable import logging -from typing import Any +from typing import Any, Literal, overload from homeassistant import config as conf_util from homeassistant.const import SERVICE_RELOAD @@ -26,7 +26,7 @@ PLATFORM_RESET_LOCK = "lock_async_reset_platform_{}" async def async_reload_integration_platforms( - hass: HomeAssistant, integration_name: str, integration_platforms: Iterable[str] + hass: HomeAssistant, integration_domain: str, platform_domains: Iterable[str] ) -> None: """Reload an integration's platforms. @@ -44,10 +44,8 @@ async def async_reload_integration_platforms( return tasks = [ - _resetup_platform( - hass, integration_name, integration_platform, unprocessed_conf - ) - for integration_platform in integration_platforms + _resetup_platform(hass, integration_domain, platform_domain, unprocessed_conf) + for platform_domain in platform_domains ] await asyncio.gather(*tasks) @@ -55,27 +53,27 @@ async def async_reload_integration_platforms( async def _resetup_platform( hass: HomeAssistant, - integration_name: str, - integration_platform: str, - unprocessed_conf: ConfigType, + integration_domain: str, + platform_domain: str, + unprocessed_config: ConfigType, ) -> None: """Resetup a platform.""" - integration = await async_get_integration(hass, integration_platform) + integration = await async_get_integration(hass, platform_domain) - conf = await conf_util.async_process_component_config( - hass, unprocessed_conf, integration + conf = await conf_util.async_process_component_and_handle_errors( + hass, unprocessed_config, integration ) if not conf: return - 
root_config: dict[str, list[ConfigType]] = {integration_platform: []} + root_config: dict[str, list[ConfigType]] = {platform_domain: []} # Extract only the config for template, ignore the rest. - for p_type, p_config in config_per_platform(conf, integration_platform): - if p_type != integration_name: + for p_type, p_config in config_per_platform(conf, platform_domain): + if p_type != integration_domain: continue - root_config[integration_platform].append(p_config) + root_config[platform_domain].append(p_config) component = integration.get_component() @@ -83,47 +81,47 @@ async def _resetup_platform( # If the integration has its own way to reset # use this method. async with hass.data.setdefault( - PLATFORM_RESET_LOCK.format(integration_platform), asyncio.Lock() + PLATFORM_RESET_LOCK.format(platform_domain), asyncio.Lock() ): - await component.async_reset_platform(hass, integration_name) + await component.async_reset_platform(hass, integration_domain) await component.async_setup(hass, root_config) return # If it's an entity platform, we use the entity_platform # async_reset method platform = async_get_platform_without_config_entry( - hass, integration_name, integration_platform + hass, integration_domain, platform_domain ) if platform: - await _async_reconfig_platform(platform, root_config[integration_platform]) + await _async_reconfig_platform(platform, root_config[platform_domain]) return - if not root_config[integration_platform]: + if not root_config[platform_domain]: # No config for this platform # and it's not loaded. Nothing to do. 
return await _async_setup_platform( - hass, integration_name, integration_platform, root_config[integration_platform] + hass, integration_domain, platform_domain, root_config[platform_domain] ) async def _async_setup_platform( hass: HomeAssistant, - integration_name: str, - integration_platform: str, + integration_domain: str, + platform_domain: str, platform_configs: list[dict[str, Any]], ) -> None: """Platform for the first time when new configuration is added.""" - if integration_platform not in hass.data: + if platform_domain not in hass.data: await async_setup_component( - hass, integration_platform, {integration_platform: platform_configs} + hass, platform_domain, {platform_domain: platform_configs} ) return - entity_component: EntityComponent[Entity] = hass.data[integration_platform] + entity_component: EntityComponent[Entity] = hass.data[platform_domain] tasks = [ - entity_component.async_setup_platform(integration_name, p_config) + entity_component.async_setup_platform(integration_domain, p_config) for p_config in platform_configs ] await asyncio.gather(*tasks) @@ -138,14 +136,41 @@ async def _async_reconfig_platform( await asyncio.gather(*tasks) +@overload async def async_integration_yaml_config( hass: HomeAssistant, integration_name: str +) -> ConfigType | None: + ... + + +@overload +async def async_integration_yaml_config( + hass: HomeAssistant, + integration_name: str, + *, + raise_on_failure: Literal[True], +) -> ConfigType: + ... + + +@overload +async def async_integration_yaml_config( + hass: HomeAssistant, + integration_name: str, + *, + raise_on_failure: Literal[False] | bool, +) -> ConfigType | None: + ... 
+ + +async def async_integration_yaml_config( + hass: HomeAssistant, integration_name: str, *, raise_on_failure: bool = False ) -> ConfigType | None: """Fetch the latest yaml configuration for an integration.""" integration = await async_get_integration(hass, integration_name) - - return await conf_util.async_process_component_config( - hass, await conf_util.async_hass_config_yaml(hass), integration + config = await conf_util.async_hass_config_yaml(hass) + return await conf_util.async_process_component_and_handle_errors( + hass, config, integration, raise_on_failure=raise_on_failure ) diff --git a/homeassistant/helpers/restore_state.py b/homeassistant/helpers/restore_state.py index 4dd71a584ec..625bab8b218 100644 --- a/homeassistant/helpers/restore_state.py +++ b/homeassistant/helpers/restore_state.py @@ -190,7 +190,8 @@ class RestoreStateData: state, self.entities[state.entity_id].extra_restore_state_data, now ) for state in all_states - if state.entity_id in self.entities and + if state.entity_id in self.entities + and # Ignore all states that are entity registry placeholders not state.attributes.get(ATTR_RESTORED) ] diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index dcf7f07bf6b..2bbad0ed63a 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -331,7 +331,12 @@ class SchemaConfigFlowHandler(config_entries.ConfigFlow, ABC): return cls.options_flow is not None @staticmethod - def _async_step(step_id: str) -> Callable: + def _async_step( + step_id: str, + ) -> Callable[ + [SchemaConfigFlowHandler, dict[str, Any] | None], + Coroutine[Any, Any, FlowResult], + ]: """Generate a step handler.""" async def _async_step( @@ -421,7 +426,12 @@ class SchemaOptionsFlowHandler(config_entries.OptionsFlowWithConfigEntry): setattr(self, "async_setup_preview", async_setup_preview) @staticmethod - def _async_step(step_id: str) -> Callable: + def 
_async_step( + step_id: str, + ) -> Callable[ + [SchemaConfigFlowHandler, dict[str, Any] | None], + Coroutine[Any, Any, FlowResult], + ]: """Generate a step handler.""" async def _async_step( diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index ac5166911ff..f7ceb4ab812 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -425,10 +425,20 @@ class ColorRGBSelector(Selector[ColorRGBSelectorConfig]): class ColorTempSelectorConfig(TypedDict, total=False): """Class to represent a color temp selector config.""" + unit: ColorTempSelectorUnit + min: int + max: int max_mireds: int min_mireds: int +class ColorTempSelectorUnit(StrEnum): + """Possible units for a color temperature selector.""" + + KELVIN = "kelvin" + MIRED = "mired" + + @SELECTORS.register("color_temp") class ColorTempSelector(Selector[ColorTempSelectorConfig]): """Selector of an color temperature.""" @@ -437,6 +447,11 @@ class ColorTempSelector(Selector[ColorTempSelectorConfig]): CONFIG_SCHEMA = vol.Schema( { + vol.Optional("unit", default=ColorTempSelectorUnit.MIRED): vol.All( + vol.Coerce(ColorTempSelectorUnit), lambda val: val.value + ), + vol.Optional("min"): vol.Coerce(int), + vol.Optional("max"): vol.Coerce(int), vol.Optional("max_mireds"): vol.Coerce(int), vol.Optional("min_mireds"): vol.Coerce(int), } @@ -448,11 +463,20 @@ class ColorTempSelector(Selector[ColorTempSelectorConfig]): def __call__(self, data: Any) -> int: """Validate the passed selection.""" + range_min = self.config.get("min") + range_max = self.config.get("max") + + if not range_min: + range_min = self.config.get("min_mireds") + + if not range_max: + range_max = self.config.get("max_mireds") + value: int = vol.All( vol.Coerce(float), vol.Range( - min=self.config.get("min_mireds"), - max=self.config.get("max_mireds"), + min=range_min, + max=range_max, ), )(data) return value @@ -1182,6 +1206,7 @@ class TextSelectorConfig(TypedDict, total=False): suffix: str type: 
TextSelectorType autocomplete: str + multiple: bool class TextSelectorType(StrEnum): @@ -1219,6 +1244,7 @@ class TextSelector(Selector[TextSelectorConfig]): vol.Coerce(TextSelectorType), lambda val: val.value ), vol.Optional("autocomplete"): str, + vol.Optional("multiple", default=False): bool, } ) @@ -1226,10 +1252,14 @@ class TextSelector(Selector[TextSelectorConfig]): """Instantiate a selector.""" super().__init__(config) - def __call__(self, data: Any) -> str: + def __call__(self, data: Any) -> str | list[str]: """Validate the passed selection.""" - text: str = vol.Schema(str)(data) - return text + if not self.config["multiple"]: + text: str = vol.Schema(str)(data) + return text + if not isinstance(data, list): + raise vol.Invalid("Value should be a list") + return [vol.Schema(str)(val) for val in data] class ThemeSelectorConfig(TypedDict): diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index b74c22c9ead..606b90e6005 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -99,8 +99,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): # Pick a random microsecond in range 0.05..0.50 to stagger the refreshes # and avoid a thundering herd. 
self._microsecond = ( - randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) - / 10**6 + randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6 ) self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} diff --git a/homeassistant/loader.py b/homeassistant/loader.py index ce868ab85f3..6fb538a5aef 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -403,9 +403,7 @@ async def async_get_zeroconf( hass: HomeAssistant, ) -> dict[str, list[dict[str, str | dict[str, str]]]]: """Return cached list of zeroconf types.""" - zeroconf: dict[ - str, list[dict[str, str | dict[str, str]]] - ] = ZEROCONF.copy() # type: ignore[assignment] + zeroconf: dict[str, list[dict[str, str | dict[str, str]]]] = ZEROCONF.copy() # type: ignore[assignment] integrations = await async_get_custom_components(hass) for integration in integrations.values(): @@ -1013,9 +1011,7 @@ def _load_file( Async friendly. """ with suppress(KeyError): - return hass.data[DATA_COMPONENTS][ # type: ignore[no-any-return] - comp_or_platform - ] + return hass.data[DATA_COMPONENTS][comp_or_platform] # type: ignore[no-any-return] cache = hass.data[DATA_COMPONENTS] diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ebbe9686044..a219df6fd98 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -1,8 +1,9 @@ +# Automatically generated by gen_requirements_all.py, do not edit + aiodiscover==1.5.1 -aiohttp-fast-url-dispatcher==0.1.0 +aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.1.1 -aiohttp==3.8.5;python_version<'3.12' -aiohttp==3.9.0b0;python_version>='3.12' +aiohttp==3.9.1 aiohttp_cors==0.7.0 astral==2.2 async-upnp-client==0.36.2 @@ -14,19 +15,19 @@ bleak-retry-connector==3.3.0 bleak==0.21.1 bluetooth-adapters==0.16.1 bluetooth-auto-recovery==1.2.3 -bluetooth-data-tools==1.14.0 +bluetooth-data-tools==1.16.0 certifi>=2021.5.30 ciso8601==2.3.0 
-cryptography==41.0.5 +cryptography==41.0.7 dbus-fast==2.14.0 fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.1.0 hass-nabucasa==0.74.0 -hassil==1.2.5 +hassil==1.5.1 home-assistant-bluetooth==1.10.4 -home-assistant-frontend==20231030.2 -home-assistant-intents==2023.10.16 +home-assistant-frontend==20231130.0 +home-assistant-intents==2023.11.29 httpx==0.25.0 ifaddr==0.2.0 janus==1.0.0 @@ -56,7 +57,7 @@ voluptuous-serialize==2.6.0 voluptuous==0.13.1 webrtc-noise-gain==1.2.3 yarl==1.9.2 -zeroconf==0.125.0 +zeroconf==0.127.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -152,7 +153,7 @@ pyOpenSSL>=23.1.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.0 +protobuf==4.25.1 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder diff --git a/homeassistant/scripts/check_config.py b/homeassistant/scripts/check_config.py index 25922ab1f81..0e00d0b75f2 100644 --- a/homeassistant/scripts/check_config.py +++ b/homeassistant/scripts/check_config.py @@ -290,13 +290,13 @@ def dump_dict(layer, indent_count=3, listi=False, **kwargs): for key, value in sorted(layer.items(), key=sort_dict_key): if isinstance(value, (dict, list)): print(indent_str, str(key) + ":", line_info(value, **kwargs)) - dump_dict(value, indent_count + 2) + dump_dict(value, indent_count + 2, **kwargs) else: - print(indent_str, str(key) + ":", value) + print(indent_str, str(key) + ":", value, line_info(key, **kwargs)) indent_str = indent_count * " " if isinstance(layer, Sequence): for i in layer: if isinstance(i, dict): - dump_dict(i, indent_count + 2, True) + dump_dict(i, indent_count + 2, True, **kwargs) else: print(" ", indent_str, i) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index bf405d5deda..679042bc4e9 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -11,14 +11,13 @@ from types import 
ModuleType from typing import Any from . import config as conf_util, core, loader, requirements -from .config import async_notify_setup_error from .const import ( EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START, PLATFORM_FORMAT, Platform, ) -from .core import CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN +from .core import CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from .exceptions import DependencyError, HomeAssistantError from .helpers.issue_registry import IssueSeverity, async_create_issue from .helpers.typing import ConfigType @@ -56,10 +55,47 @@ DATA_SETUP_TIME = "setup_time" DATA_DEPS_REQS = "deps_reqs_processed" +DATA_PERSISTENT_ERRORS = "bootstrap_persistent_errors" + +NOTIFY_FOR_TRANSLATION_KEYS = [ + "config_validation_err", + "platform_config_validation_err", +] + SLOW_SETUP_WARNING = 10 SLOW_SETUP_MAX_WAIT = 300 +@callback +def async_notify_setup_error( + hass: HomeAssistant, component: str, display_link: str | None = None +) -> None: + """Print a persistent notification. + + This method must be run in the event loop. + """ + # pylint: disable-next=import-outside-toplevel + from .components import persistent_notification + + if (errors := hass.data.get(DATA_PERSISTENT_ERRORS)) is None: + errors = hass.data[DATA_PERSISTENT_ERRORS] = {} + + errors[component] = errors.get(component) or display_link + + message = "The following integrations and platforms could not be set up:\n\n" + + for name, link in errors.items(): + show_logs = f"[Show logs](/config/logs?filter={name})" + part = f"[{name}]({link})" if link else name + message += f" - {part} ({show_logs})\n" + + message += "\nPlease check your config and [logs](/config/logs)." + + persistent_notification.async_create( + hass, message, "Invalid config", "invalid_config" + ) + + @core.callback def async_set_domains_to_be_loaded(hass: core.HomeAssistant, domains: set[str]) -> None: """Set domains that are going to be loaded from the config. 
@@ -157,7 +193,7 @@ async def _async_process_dependencies( if failed: _LOGGER.error( - "Unable to set up dependencies of %s. Setup failed for dependencies: %s", + "Unable to set up dependencies of '%s'. Setup failed for dependencies: %s", integration.domain, ", ".join(failed), ) @@ -183,7 +219,7 @@ async def _async_setup_component( custom = "" if integration.is_built_in else "custom integration " link = integration.documentation _LOGGER.error( - "Setup failed for %s%s: %s", custom, domain, msg, exc_info=exc_info + "Setup failed for %s'%s': %s", custom, domain, msg, exc_info=exc_info ) async_notify_setup_error(hass, domain, link) @@ -217,10 +253,18 @@ async def _async_setup_component( log_error(f"Unable to import component: {err}", err) return False - processed_config = await conf_util.async_process_component_config( + integration_config_info = await conf_util.async_process_component_config( hass, config, integration ) - + processed_config = conf_util.async_handle_component_errors( + hass, integration_config_info, integration + ) + for platform_exception in integration_config_info.exception_info_list: + if platform_exception.translation_key not in NOTIFY_FOR_TRANSLATION_KEYS: + continue + async_notify_setup_error( + hass, platform_exception.platform_name, platform_exception.integration_link + ) if processed_config is None: log_error("Invalid config.") return False @@ -234,8 +278,8 @@ async def _async_setup_component( ): _LOGGER.error( ( - "The %s integration does not support YAML setup, please remove it from " - "your configuration" + "The '%s' integration does not support YAML setup, please remove it " + "from your configuration" ), domain, ) @@ -289,7 +333,7 @@ async def _async_setup_component( except asyncio.TimeoutError: _LOGGER.error( ( - "Setup of %s is taking longer than %s seconds." + "Setup of '%s' is taking longer than %s seconds." 
" Startup will proceed without waiting any longer" ), domain, @@ -356,7 +400,9 @@ async def async_prepare_setup_platform( def log_error(msg: str) -> None: """Log helper.""" - _LOGGER.error("Unable to prepare setup for platform %s: %s", platform_path, msg) + _LOGGER.error( + "Unable to prepare setup for platform '%s': %s", platform_path, msg + ) async_notify_setup_error(hass, platform_path) try: diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index 8e7fc3dc155..4520a62a5d8 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -576,6 +576,18 @@ def _white_levels_to_color_temperature( ), min(255, round(brightness * 255)) +def color_xy_to_temperature(x: float, y: float) -> int: + """Convert an xy color to a color temperature in Kelvin. + + Uses McCamy's approximation (https://doi.org/10.1002/col.5080170211), + close enough for uses between 2000 K and 10000 K. + """ + n = (x - 0.3320) / (0.1858 - y) + CCT = 437 * (n**3) + 3601 * (n**2) + 6861 * n + 5517 + + return int(CCT) + + def _clamp(color_component: float, minimum: float = 0, maximum: float = 255) -> float: """Clamp the given color component value between the given min and max values. diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index 7f81c281340..ac18d43727c 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -57,7 +57,8 @@ def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObject def load_json( - filename: str | PathLike, default: JsonValueType = _SENTINEL # type: ignore[assignment] + filename: str | PathLike, + default: JsonValueType = _SENTINEL, # type: ignore[assignment] ) -> JsonValueType: """Load JSON data from a file. 
@@ -79,7 +80,8 @@ def load_json( def load_json_array( - filename: str | PathLike, default: JsonArrayType = _SENTINEL # type: ignore[assignment] + filename: str | PathLike, + default: JsonArrayType = _SENTINEL, # type: ignore[assignment] ) -> JsonArrayType: """Load JSON data from a file and return as list. @@ -98,7 +100,8 @@ def load_json_array( def load_json_object( - filename: str | PathLike, default: JsonObjectType = _SENTINEL # type: ignore[assignment] + filename: str | PathLike, + default: JsonObjectType = _SENTINEL, # type: ignore[assignment] ) -> JsonObjectType: """Load JSON data from a file and return as dict. diff --git a/homeassistant/util/location.py b/homeassistant/util/location.py index 44fcaa07067..b2ef7330660 100644 --- a/homeassistant/util/location.py +++ b/homeassistant/util/location.py @@ -129,6 +129,7 @@ def vincenty( uSq = cosSqAlpha * (AXIS_A**2 - AXIS_B**2) / (AXIS_B**2) A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq))) B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq))) + # fmt: off deltaSigma = ( B * sinSigma @@ -141,11 +142,12 @@ def vincenty( - B / 6 * cos2SigmaM - * (-3 + 4 * sinSigma**2) - * (-3 + 4 * cos2SigmaM**2) + * (-3 + 4 * sinSigma ** 2) + * (-3 + 4 * cos2SigmaM ** 2) ) ) ) + # fmt: on s = AXIS_B * A * (sigma - deltaSigma) s /= 1000 # Conversion of meters to kilometers diff --git a/homeassistant/util/ulid.py b/homeassistant/util/ulid.py index 643286cedb9..818b8015549 100644 --- a/homeassistant/util/ulid.py +++ b/homeassistant/util/ulid.py @@ -1,11 +1,22 @@ """Helpers to generate ulids.""" from __future__ import annotations -import time +from ulid_transform import ( + bytes_to_ulid, + ulid_at_time, + ulid_hex, + ulid_now, + ulid_to_bytes, +) -from ulid_transform import bytes_to_ulid, ulid_at_time, ulid_hex, ulid_to_bytes - -__all__ = ["ulid", "ulid_hex", "ulid_at_time", "ulid_to_bytes", "bytes_to_ulid"] +__all__ = [ + "ulid", + "ulid_hex", + "ulid_at_time", + "ulid_to_bytes", + "bytes_to_ulid", + 
"ulid_now", +] def ulid(timestamp: float | None = None) -> str: @@ -25,4 +36,4 @@ def ulid(timestamp: float | None = None) -> str: import ulid ulid.parse(ulid_util.ulid()) """ - return ulid_at_time(timestamp or time.time()) + return ulid_now() if timestamp is None else ulid_at_time(timestamp) diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 6c2cfa1f953..275a51cd760 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -23,6 +23,7 @@ except ImportError: ) from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.deprecation import deprecated_class from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass @@ -136,6 +137,11 @@ class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): self.secrets = secrets +@deprecated_class("FastSafeLoader") +class SafeLoader(FastSafeLoader): + """Provided for backwards compatibility. Logs when instantiated.""" + + class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): """Python safe loader.""" @@ -145,6 +151,11 @@ class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): self.secrets = secrets +@deprecated_class("PythonSafeLoader") +class SafeLineLoader(PythonSafeLoader): + """Provided for backwards compatibility. Logs when instantiated.""" + + LoaderType = FastSafeLoader | PythonSafeLoader @@ -238,7 +249,7 @@ def _add_reference( # type: ignore[no-untyped-def] def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: - """Load another YAML file and embeds it using the !include tag. + """Load another YAML file and embed it using the !include tag. 
Example: device_tracker: !include device_tracker.yaml @@ -340,7 +351,12 @@ def _handle_mapping_tag( raise yaml.MarkedYAMLError( context=f'invalid key: "{key}"', context_mark=yaml.Mark( - fname, 0, line, -1, None, None # type: ignore[arg-type] + fname, + 0, + line, + -1, + None, + None, # type: ignore[arg-type] ), ) from exc diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index b2320a74d2c..6aedc85cf60 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -2,7 +2,10 @@ from __future__ import annotations from dataclasses import dataclass +from typing import Any +import voluptuous as vol +from voluptuous.schema_builder import _compile_scalar import yaml @@ -13,6 +16,10 @@ class NodeListClass(list): class NodeStrClass(str): """Wrapper class to be able to add attributes on a string.""" + def __voluptuous_compile__(self, schema: vol.Schema) -> Any: + """Needed because vol.Schema.compile does not handle str subclasses.""" + return _compile_scalar(self) + class NodeDictClass(dict): """Wrapper class to be able to add attributes on a dict.""" diff --git a/mypy.ini b/mypy.ini index 697907c50db..ad0b4527930 100644 --- a/mypy.ini +++ b/mypy.ini @@ -971,6 +971,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.faa_delays.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.fan.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1041,6 +1051,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.flexit_bacnet.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true 
+disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.flux_led.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1791,6 +1811,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.linear_garage_door.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.litejet.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2401,6 +2431,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.pushbullet.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.pvoutput.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index 550cafc4146..844fac7142f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2023.12.0.dev0" +version = "2024.1.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" @@ -23,10 +23,9 @@ classifiers = [ ] requires-python = ">=3.11.0" dependencies = [ - "aiohttp==3.9.0b0;python_version>='3.12'", - "aiohttp==3.8.5;python_version<'3.12'", + "aiohttp==3.9.1", "aiohttp_cors==0.7.0", - "aiohttp-fast-url-dispatcher==0.1.0", + "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-zlib-ng==0.1.1", "astral==2.2", "attrs==23.1.0", @@ -44,7 +43,7 @@ dependencies = [ "lru-dict==1.2.0", "PyJWT==2.8.0", # PyJWT has loose dependency. We want the latest one. - "cryptography==41.0.5", + "cryptography==41.0.7", # pyOpenSSL 23.2.0 is required to work with cryptography 41+ "pyOpenSSL==23.2.0", "orjson==3.9.9", @@ -79,9 +78,6 @@ include-package-data = true [tool.setuptools.packages.find] include = ["homeassistant*"] -[tool.black] -extend-exclude = "/generated/" - [tool.pylint.MAIN] py-version = "3.11" ignore = [ @@ -128,7 +124,7 @@ class-const-naming-style = "any" [tool.pylint."MESSAGES CONTROL"] # Reasons disabled: -# format - handled by black +# format - handled by ruff # locally-disabled - it spams too much # duplicate-code - unavoidable # cyclic-import - doesn't test if both import on load @@ -505,8 +501,6 @@ filterwarnings = [ "ignore:python-telegram-bot is using upstream urllib3:UserWarning:telegram.utils.request", # https://github.com/ludeeus/pytraccar/pull/15 - >1.0.0 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pytraccar.client", - # https://github.com/zopefoundation/RestrictedPython/pull/259 - >7.0a1.dev0 - "ignore:ast\\.(Str|Num) is deprecated and will be removed in Python 3.14:DeprecationWarning:RestrictedPython.transformer", # https://github.com/grahamwetzler/smart-meter-texas/pull/143 - >0.5.3 "ignore:ssl.OP_NO_SSL\\*/ssl.OP_NO_TLS\\* options are deprecated:DeprecationWarning:smart_meter_texas", # https://github.com/Bluetooth-Devices/xiaomi-ble/pull/59 - >0.21.1 diff --git a/requirements.txt b/requirements.txt index 1ca4643a747..aa9a0ab0e5a 100644 --- a/requirements.txt +++ 
b/requirements.txt @@ -1,10 +1,11 @@ +# Automatically generated by gen_requirements_all.py, do not edit + -c homeassistant/package_constraints.txt # Home Assistant Core -aiohttp==3.9.0b0;python_version>='3.12' -aiohttp==3.8.5;python_version<'3.12' +aiohttp==3.9.1 aiohttp_cors==0.7.0 -aiohttp-fast-url-dispatcher==0.1.0 +aiohttp-fast-url-dispatcher==0.3.0 aiohttp-zlib-ng==0.1.1 astral==2.2 attrs==23.1.0 @@ -19,7 +20,7 @@ ifaddr==0.2.0 Jinja2==3.1.2 lru-dict==1.2.0 PyJWT==2.8.0 -cryptography==41.0.5 +cryptography==41.0.7 pyOpenSSL==23.2.0 orjson==3.9.9 packaging>=23.1 diff --git a/requirements_all.txt b/requirements_all.txt index 2e00fcb26b8..0fe425756d7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1,4 +1,6 @@ # Home Assistant Core, full dependency set +# Automatically generated by gen_requirements_all.py, do not edit + -r requirements.txt # homeassistant.components.aemet @@ -8,7 +10,7 @@ AEMET-OpenData==0.4.6 AIOAladdinConnect==0.1.58 # homeassistant.components.honeywell -AIOSomecomfort==0.0.17 +AIOSomecomfort==0.0.24 # homeassistant.components.adax Adax-local==0.1.5 @@ -19,9 +21,6 @@ Ambiclimate==0.2.1 # homeassistant.components.blinksticklight BlinkStick==1.2.0 -# homeassistant.components.co2signal -CO2Signal==0.4.2 - # homeassistant.components.doorbird DoorBirdPy==2.1.0 @@ -55,7 +54,7 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==13.0.7 +PyChromecast==13.0.8 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -113,7 +112,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.28.1 +PyViCare==2.29.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -122,10 +121,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.0.3 # homeassistant.components.python_script -RestrictedPython==6.2;python_version<'3.12' - -# homeassistant.components.python_script -RestrictedPython==7.0a1.dev0;python_version>='3.12' +RestrictedPython==7.0 # 
homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -147,10 +143,10 @@ TwitterAPI==2.7.12 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.0 +accuweather==2.1.1 # homeassistant.components.adax -adax==0.3.0 +adax==0.4.0 # homeassistant.components.androidtv adb-shell[async]==0.4.4 @@ -159,7 +155,7 @@ adb-shell[async]==0.4.4 adext==0.4.2 # homeassistant.components.adguard -adguardhome==0.6.2 +adguardhome==0.6.3 # homeassistant.components.advantage_air advantage-air==0.4.4 @@ -189,7 +185,7 @@ aio-geojson-usgs-earthquakes==0.2 aio-georss-gdacs==0.8 # homeassistant.components.airq -aioairq==0.2.4 +aioairq==0.3.1 # homeassistant.components.airzone_cloud aioairzone-cloud==0.3.6 @@ -216,7 +212,7 @@ aiobafi6==0.9.0 aiobotocore==2.6.0 # homeassistant.components.comelit -aiocomelit==0.3.0 +aiocomelit==0.6.2 # homeassistant.components.dhcp aiodiscover==1.5.1 @@ -233,11 +229,14 @@ aioeagle==1.1.0 # homeassistant.components.ecowitt aioecowitt==2023.5.0 +# homeassistant.components.co2signal +aioelectricitymaps==0.1.5 + # homeassistant.components.emonitor aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==18.4.0 +aioesphomeapi==19.2.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -258,7 +257,7 @@ aioharmony==0.2.10 aiohomekit==3.0.9 # homeassistant.components.http -aiohttp-fast-url-dispatcher==0.1.0 +aiohttp-fast-url-dispatcher==0.3.0 # homeassistant.components.http aiohttp-zlib-ng==0.1.1 @@ -283,10 +282,10 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.5 +aiolifx-themes==0.4.10 # homeassistant.components.lifx -aiolifx==0.8.10 +aiolifx==1.0.0 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -354,7 +353,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==6.0.0 +aioshelly==6.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -375,16 +374,16 @@ aiosyncthing==0.5.1 aiotractive==0.5.6 # homeassistant.components.unifi 
-aiounifi==65 +aiounifi==66 # homeassistant.components.vlc_telnet aiovlc==0.1.0 # homeassistant.components.vodafone_station -aiovodafone==0.4.2 +aiovodafone==0.4.3 # homeassistant.components.waqi -aiowaqi==3.0.0 +aiowaqi==3.0.1 # homeassistant.components.watttime aiowatttime==0.1.1 @@ -393,7 +392,7 @@ aiowatttime==0.1.1 aiowebostv==0.3.3 # homeassistant.components.withings -aiowithings==1.0.2 +aiowithings==2.0.0 # homeassistant.components.yandex_transport aioymaps==1.2.2 @@ -464,9 +463,6 @@ asmog==0.0.6 # homeassistant.components.asterisk_mbox asterisk_mbox==0.5.0 -# homeassistant.components.esphome -async-interrupt==1.1.1 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv @@ -527,10 +523,10 @@ beautifulsoup4==4.12.2 # beewi-smartclim==0.0.10 # homeassistant.components.zha -bellows==0.36.8 +bellows==0.37.1 # homeassistant.components.bmw_connected_drive -bimmer-connected==0.14.2 +bimmer-connected[china]==0.14.5 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -568,13 +564,13 @@ bluetooth-auto-recovery==1.2.3 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.14.0 +bluetooth-data-tools==1.16.0 # homeassistant.components.bond bond-async==0.2.1 # homeassistant.components.bosch_shc -boschshcpy==0.2.57 +boschshcpy==0.2.75 # homeassistant.components.amazon_polly # homeassistant.components.route53 @@ -637,7 +633,6 @@ concord232==0.15 # homeassistant.components.upc_connect connect-box==0.2.8 -# homeassistant.components.eq3btsmart # homeassistant.components.xiaomi_miio construct==2.10.68 @@ -685,6 +680,9 @@ demetriek==0.4.0 # homeassistant.components.denonavr denonavr==0.11.4 +# homeassistant.components.devialet +devialet==1.4.3 + # homeassistant.components.devolo_home_control devolo-home-control-api==0.18.2 @@ -725,7 +723,7 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy 
-easyenergy==0.3.0 +easyenergy==1.0.0 # homeassistant.components.ebusd ebusdpy==0.0.17 @@ -737,7 +735,7 @@ ecoaliface==0.4.0 electrickiwi-api==0.8.5 # homeassistant.components.elgato -elgato==5.1.0 +elgato==5.1.1 # homeassistant.components.eliqonline eliqonline==1.2.2 @@ -758,7 +756,7 @@ emulated-roku==0.2.1 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==0.5.0 +energyzero==1.0.0 # homeassistant.components.enocean enocean==0.50 @@ -791,7 +789,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.6 +evohome-async==0.4.9 # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -824,6 +822,9 @@ fixerio==1.0.0a0 # homeassistant.components.fjaraskupan fjaraskupan==2.2.0 +# homeassistant.components.flexit_bacnet +flexit_bacnet==2.1.0 + # homeassistant.components.flipr flipr-api==1.5.0 @@ -863,7 +864,7 @@ gardena-bluetooth==1.4.0 gassist-text==0.0.10 # homeassistant.components.google -gcal-sync==6.0.1 +gcal-sync==6.0.3 # homeassistant.components.geniushub geniushub-client==0.7.1 @@ -891,7 +892,7 @@ georss-qld-bushfire-alert-client==0.5 getmac==0.8.2 # homeassistant.components.gios -gios==3.2.1 +gios==3.2.2 # homeassistant.components.gitter gitterpy==0.1.7 @@ -946,7 +947,7 @@ greeneye_monitor==3.0.3 greenwavereality==0.5.1 # homeassistant.components.pure_energie -gridnet==4.2.0 +gridnet==5.0.0 # homeassistant.components.growatt_server growattServer==1.3.0 @@ -986,7 +987,7 @@ hass-nabucasa==0.74.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.2.5 +hassil==1.5.1 # homeassistant.components.jewish_calendar hdate==0.10.4 @@ -1013,13 +1014,13 @@ hlk-sw16==0.0.9 hole==0.8.0 # homeassistant.components.workday -holidays==0.35 +holidays==0.36 # homeassistant.components.frontend -home-assistant-frontend==20231030.2 +home-assistant-frontend==20231130.0 # homeassistant.components.conversation -home-assistant-intents==2023.10.16 +home-assistant-intents==2023.11.29 # 
homeassistant.components.home_connect homeconnect==0.7.2 @@ -1037,7 +1038,7 @@ horimote==0.4.1 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.6.11 +huawei-lte-api==1.7.3 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1062,7 +1063,7 @@ ical==6.1.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.3 +idasen-ha==2.4 # homeassistant.components.network ifaddr==0.2.0 @@ -1130,9 +1131,6 @@ kiwiki-client==0.1.1 # homeassistant.components.knx knx-frontend==2023.6.23.191712 -# homeassistant.components.komfovent -komfovent-api==0.0.3 - # homeassistant.components.konnected konnected==1.2.0 @@ -1178,6 +1176,9 @@ lightwave==0.24 # homeassistant.components.limitlessled limitlessled==1.1.3 +# homeassistant.components.linear_garage_door +linear-garage-door==0.2.7 + # homeassistant.components.linode linode-api==4.1.9b1 @@ -1248,7 +1249,7 @@ micloud==0.5 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.6 +millheater==0.11.7 # homeassistant.components.minio minio==7.1.12 @@ -1260,7 +1261,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.4.1 +mopeka-iot-ble==0.5.0 # homeassistant.components.motion_blinds motionblinds==0.6.18 @@ -1280,6 +1281,9 @@ mutagen==1.47.0 # homeassistant.components.mutesync mutesync==0.0.1 +# homeassistant.components.permobil +mypermobil==0.1.6 + # homeassistant.components.nad nad-receiver==0.3.0 @@ -1296,7 +1300,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.1 +nettigo-air-monitor==2.2.2 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1311,10 +1315,10 @@ nextcloudmonitor==1.4.0 nextcord==2.0.0a8 # homeassistant.components.nextdns -nextdns==2.0.1 +nextdns==2.1.0 # homeassistant.components.nibe_heatpump -nibe==2.5.0 +nibe==2.5.2 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1361,7 +1365,7 @@ oauth2client==4.1.3 objgraph==3.5.0 # 
homeassistant.components.garages_amsterdam -odp-amsterdam==5.3.1 +odp-amsterdam==6.0.0 # homeassistant.components.oem oemthermostat==1.1.1 @@ -1423,11 +1427,14 @@ oru==0.1.11 # homeassistant.components.orvibo orvibo==1.1.1 +# homeassistant.components.ourgroceries +ourgroceries==1.5.4 + # homeassistant.components.ovo_energy ovoenergy==1.2.0 # homeassistant.components.p1_monitor -p1monitor==2.1.1 +p1monitor==3.0.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1475,7 +1482,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.33.2 +plugwise==0.34.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1521,7 +1528,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.0 +pvo==2.1.1 # homeassistant.components.canary py-canary==0.5.3 @@ -1611,6 +1618,9 @@ pyairnow==1.2.1 # homeassistant.components.airvisual_pro pyairvisual==2023.08.1 +# homeassistant.components.asuswrt +pyasuswrt==0.1.20 + # homeassistant.components.atag pyatag==0.3.5.3 @@ -1675,7 +1685,7 @@ pydaikin==2.11.1 pydanfossair==0.1.0 # homeassistant.components.deconz -pydeconz==113 +pydeconz==114 # homeassistant.components.delijn pydelijn==1.1.0 @@ -1711,7 +1721,7 @@ pyedimax==0.2.1 pyefergy==22.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.14.2 +pyenphase==1.14.3 # homeassistant.components.envisalink pyenvisalink==4.6 @@ -1786,7 +1796,7 @@ pyialarm==2.2.0 pyicloud==1.0.0 # homeassistant.components.insteon -pyinsteon==1.5.1 +pyinsteon==1.5.2 # homeassistant.components.intesishome pyintesishome==1.8.0 @@ -1828,7 +1838,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.0.0 +pykoplenti==1.2.2 # homeassistant.components.kraken pykrakenapi==0.1.8 @@ -1915,7 +1925,7 @@ pynuki==1.6.2 pynut2==2.1.2 # homeassistant.components.nws -pynws==1.5.1 +pynws==1.6.0 # homeassistant.components.nx584 pynx584==0.5 @@ -1950,7 +1960,7 @@ pyotgw==2.1.3 pyotp==2.8.0 # 
homeassistant.components.overkiz -pyoverkiz==1.13.2 +pyoverkiz==1.13.3 # homeassistant.components.openweathermap pyowm==3.2.0 @@ -1995,7 +2005,7 @@ pyqwikswitch==0.93 pyrail==0.0.3 # homeassistant.components.rainbird -pyrainbird==4.0.0 +pyrainbird==4.0.1 # homeassistant.components.recswitch pyrecswitch==1.0.2 @@ -2087,7 +2097,7 @@ pysqueezebox==0.6.3 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuez==0.1.19 +pysuez==0.2.0 # homeassistant.components.switchbee pyswitchbee==1.8.0 @@ -2122,9 +2132,6 @@ python-digitalocean==1.13.2 # homeassistant.components.ecobee python-ecobee-api==0.2.17 -# homeassistant.components.eq3btsmart -# python-eq3bt==0.2 - # homeassistant.components.etherscan python-etherscan-api==0.0.3 @@ -2144,7 +2151,7 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.homewizard -python-homewizard-energy==3.1.0 +python-homewizard-energy==4.1.0 # homeassistant.components.hp_ilo python-hpilo==4.3 @@ -2165,7 +2172,7 @@ python-kasa[speedups]==0.5.4 # python-lirc==1.2.3 # homeassistant.components.matter -python-matter-server==4.0.0 +python-matter-server==5.0.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -2196,10 +2203,10 @@ python-qbittorrent==0.4.3 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==0.36.1 +python-roborock==0.36.2 # homeassistant.components.smarttub -python-smarttub==0.0.35 +python-smarttub==0.0.36 # homeassistant.components.songpal python-songpal==0.16 @@ -2235,7 +2242,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.8 +pytrafikverket==0.3.9.1 # homeassistant.components.v2c pytrydan==0.4.0 @@ -2337,7 +2344,7 @@ renault-api==0.2.0 renson-endura-delta==1.6.0 # homeassistant.components.reolink -reolink-aio==0.7.15 +reolink-aio==0.8.1 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2346,7 +2353,7 @@ 
rfk101py==0.0.1 rflink==0.0.65 # homeassistant.components.ring -ring-doorbell==0.7.3 +ring-doorbell[listen]==0.8.3 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2425,10 +2432,10 @@ sensorpro-ble==0.5.3 sensorpush-ble==1.5.5 # homeassistant.components.sentry -sentry-sdk==1.34.0 +sentry-sdk==1.37.1 # homeassistant.components.sfr_box -sfrbox-api==0.0.6 +sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 @@ -2548,7 +2555,7 @@ switchbot-api==1.2.1 synology-srm==0.2.0 # homeassistant.components.system_bridge -systembridgeconnector==3.9.5 +systembridgeconnector==3.10.0 # homeassistant.components.tailscale tailscale==0.6.0 @@ -2635,7 +2642,7 @@ ttls==1.5.1 tuya-iot-py-sdk==0.6.6 # homeassistant.components.twentemilieu -twentemilieu==2.0.0 +twentemilieu==2.0.1 # homeassistant.components.twilio twilio==6.32.0 @@ -2656,7 +2663,7 @@ unifi-discovery==1.1.7 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.14 +universal-silabs-flasher==0.0.15 # homeassistant.components.upb upb-lib==0.5.4 @@ -2672,11 +2679,14 @@ url-normalize==1.4.3 # homeassistant.components.uvc uvcclient==0.11.0 +# homeassistant.components.roborock +vacuum-map-parser-roborock==0.1.1 + # homeassistant.components.vallox vallox-websocket-api==4.0.2 # homeassistant.components.rdw -vehicle==2.2.0 +vehicle==2.2.1 # homeassistant.components.velbus velbus-aio==2023.11.0 @@ -2782,7 +2792,7 @@ yalexs-ble==2.3.2 yalexs==1.10.0 # homeassistant.components.yeelight -yeelight==0.7.13 +yeelight==0.7.14 # homeassistant.components.yeelightsunflower yeelightsunflower==0.0.10 @@ -2797,22 +2807,22 @@ youless-api==1.0.1 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2023.10.13 +yt-dlp==2023.11.16 # homeassistant.components.zamg -zamg==0.3.0 +zamg==0.3.3 # homeassistant.components.zengge zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.125.0 +zeroconf==0.127.0 # homeassistant.components.zeversolar zeversolar==0.3.1 # homeassistant.components.zha 
-zha-quirks==0.0.106 +zha-quirks==0.0.107 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.9 @@ -2821,25 +2831,25 @@ zhong-hong-hvac==1.0.9 ziggo-mediabox-xl==1.1.0 # homeassistant.components.zha -zigpy-deconz==0.21.1 +zigpy-deconz==0.22.0 # homeassistant.components.zha -zigpy-xbee==0.19.0 +zigpy-xbee==0.20.0 # homeassistant.components.zha -zigpy-zigate==0.11.0 +zigpy-zigate==0.12.0 # homeassistant.components.zha -zigpy-znp==0.11.6 +zigpy-znp==0.12.0 # homeassistant.components.zha -zigpy==0.59.0 +zigpy==0.60.0 # homeassistant.components.zoneminder zm-py==0.5.2 # homeassistant.components.zwave_js -zwave-js-server-python==0.53.1 +zwave-js-server-python==0.54.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index bc88a59fc8e..d880fecaca5 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -11,7 +11,7 @@ astroid==3.0.1 coverage==7.3.2 freezegun==1.2.2 mock-open==1.4.0 -mypy==1.7.0 +mypy==1.7.1 pre-commit==3.5.0 pydantic==1.10.12 pylint==3.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e05ad8c9fa7..5921c1739b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -10,7 +10,7 @@ AEMET-OpenData==0.4.6 AIOAladdinConnect==0.1.58 # homeassistant.components.honeywell -AIOSomecomfort==0.0.17 +AIOSomecomfort==0.0.24 # homeassistant.components.adax Adax-local==0.1.5 @@ -18,9 +18,6 @@ Adax-local==0.1.5 # homeassistant.components.ambiclimate Ambiclimate==0.2.1 -# homeassistant.components.co2signal -CO2Signal==0.4.2 - # homeassistant.components.doorbird DoorBirdPy==2.1.0 @@ -48,7 +45,7 @@ PlexAPI==4.15.4 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==13.0.7 +PyChromecast==13.0.8 # homeassistant.components.flick_electric PyFlick==0.0.2 @@ -100,7 +97,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.28.1 +PyViCare==2.29.0 # homeassistant.components.xiaomi_aqara 
PyXiaomiGateway==0.14.3 @@ -109,10 +106,7 @@ PyXiaomiGateway==0.14.3 RachioPy==1.0.3 # homeassistant.components.python_script -RestrictedPython==6.2;python_version<'3.12' - -# homeassistant.components.python_script -RestrictedPython==7.0a1.dev0;python_version>='3.12' +RestrictedPython==7.0 # homeassistant.components.remember_the_milk RtmAPI==0.7.2 @@ -128,10 +122,10 @@ Tami4EdgeAPI==2.1 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.0 +accuweather==2.1.1 # homeassistant.components.adax -adax==0.3.0 +adax==0.4.0 # homeassistant.components.androidtv adb-shell[async]==0.4.4 @@ -140,7 +134,7 @@ adb-shell[async]==0.4.4 adext==0.4.2 # homeassistant.components.adguard -adguardhome==0.6.2 +adguardhome==0.6.3 # homeassistant.components.advantage_air advantage-air==0.4.4 @@ -170,7 +164,7 @@ aio-geojson-usgs-earthquakes==0.2 aio-georss-gdacs==0.8 # homeassistant.components.airq -aioairq==0.2.4 +aioairq==0.3.1 # homeassistant.components.airzone_cloud aioairzone-cloud==0.3.6 @@ -197,7 +191,7 @@ aiobafi6==0.9.0 aiobotocore==2.6.0 # homeassistant.components.comelit -aiocomelit==0.3.0 +aiocomelit==0.6.2 # homeassistant.components.dhcp aiodiscover==1.5.1 @@ -214,11 +208,14 @@ aioeagle==1.1.0 # homeassistant.components.ecowitt aioecowitt==2023.5.0 +# homeassistant.components.co2signal +aioelectricitymaps==0.1.5 + # homeassistant.components.emonitor aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==18.4.0 +aioesphomeapi==19.2.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -236,7 +233,7 @@ aioharmony==0.2.10 aiohomekit==3.0.9 # homeassistant.components.http -aiohttp-fast-url-dispatcher==0.1.0 +aiohttp-fast-url-dispatcher==0.3.0 # homeassistant.components.http aiohttp-zlib-ng==0.1.1 @@ -258,10 +255,10 @@ aiokafka==0.7.2 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.5 +aiolifx-themes==0.4.10 # homeassistant.components.lifx -aiolifx==0.8.10 +aiolifx==1.0.0 # homeassistant.components.livisi 
aiolivisi==0.0.19 @@ -329,7 +326,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==6.0.0 +aioshelly==6.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -350,16 +347,16 @@ aiosyncthing==0.5.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==65 +aiounifi==66 # homeassistant.components.vlc_telnet aiovlc==0.1.0 # homeassistant.components.vodafone_station -aiovodafone==0.4.2 +aiovodafone==0.4.3 # homeassistant.components.waqi -aiowaqi==3.0.0 +aiowaqi==3.0.1 # homeassistant.components.watttime aiowatttime==0.1.1 @@ -368,7 +365,7 @@ aiowatttime==0.1.1 aiowebostv==0.3.3 # homeassistant.components.withings -aiowithings==1.0.2 +aiowithings==2.0.0 # homeassistant.components.yandex_transport aioymaps==1.2.2 @@ -418,9 +415,6 @@ aranet4==2.2.2 # homeassistant.components.arcam_fmj arcam-fmj==1.4.0 -# homeassistant.components.esphome -async-interrupt==1.1.1 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv @@ -451,10 +445,10 @@ base36==0.1.1 beautifulsoup4==4.12.2 # homeassistant.components.zha -bellows==0.36.8 +bellows==0.37.1 # homeassistant.components.bmw_connected_drive -bimmer-connected==0.14.2 +bimmer-connected[china]==0.14.5 # homeassistant.components.bluetooth bleak-retry-connector==3.3.0 @@ -482,13 +476,13 @@ bluetooth-auto-recovery==1.2.3 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.14.0 +bluetooth-data-tools==1.16.0 # homeassistant.components.bond bond-async==0.2.1 # homeassistant.components.bosch_shc -boschshcpy==0.2.57 +boschshcpy==0.2.75 # homeassistant.components.broadlink broadlink==0.18.3 @@ -520,7 +514,6 @@ colorlog==6.7.0 # homeassistant.components.color_extractor colorthief==0.2.1 -# homeassistant.components.eq3btsmart # homeassistant.components.xiaomi_miio construct==2.10.68 @@ -562,6 +555,9 @@ demetriek==0.4.0 # 
homeassistant.components.denonavr denonavr==0.11.4 +# homeassistant.components.devialet +devialet==1.4.3 + # homeassistant.components.devolo_home_control devolo-home-control-api==0.18.2 @@ -593,13 +589,13 @@ dynalite-panel==0.0.4 eagle100==0.1.1 # homeassistant.components.easyenergy -easyenergy==0.3.0 +easyenergy==1.0.0 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 # homeassistant.components.elgato -elgato==5.1.0 +elgato==5.1.1 # homeassistant.components.elkm1 elkm1-lib==2.2.6 @@ -614,7 +610,7 @@ emulated-roku==0.2.1 energyflip-client==0.2.2 # homeassistant.components.energyzero -energyzero==0.5.0 +energyzero==1.0.0 # homeassistant.components.enocean enocean==0.50 @@ -637,6 +633,9 @@ eufylife-ble-client==0.1.8 # homeassistant.components.faa_delays faadelays==2023.9.1 +# homeassistant.components.fastdotcom +fastdotcom==0.0.3 + # homeassistant.components.feedreader feedparser==6.0.10 @@ -652,6 +651,9 @@ fivem-api==0.1.2 # homeassistant.components.fjaraskupan fjaraskupan==2.2.0 +# homeassistant.components.flexit_bacnet +flexit_bacnet==2.1.0 + # homeassistant.components.flipr flipr-api==1.5.0 @@ -685,7 +687,7 @@ gardena-bluetooth==1.4.0 gassist-text==0.0.10 # homeassistant.components.google -gcal-sync==6.0.1 +gcal-sync==6.0.3 # homeassistant.components.geocaching geocachingapi==0.2.1 @@ -710,7 +712,7 @@ georss-qld-bushfire-alert-client==0.5 getmac==0.8.2 # homeassistant.components.gios -gios==3.2.1 +gios==3.2.2 # homeassistant.components.glances glances-api==0.4.3 @@ -747,7 +749,7 @@ greeclimate==1.4.1 greeneye_monitor==3.0.3 # homeassistant.components.pure_energie -gridnet==4.2.0 +gridnet==5.0.0 # homeassistant.components.growatt_server growattServer==1.3.0 @@ -781,7 +783,7 @@ habitipy==0.2.0 hass-nabucasa==0.74.0 # homeassistant.components.conversation -hassil==1.2.5 +hassil==1.5.1 # homeassistant.components.jewish_calendar hdate==0.10.4 @@ -799,13 +801,13 @@ hlk-sw16==0.0.9 hole==0.8.0 # homeassistant.components.workday -holidays==0.35 
+holidays==0.36 # homeassistant.components.frontend -home-assistant-frontend==20231030.2 +home-assistant-frontend==20231130.0 # homeassistant.components.conversation -home-assistant-intents==2023.10.16 +home-assistant-intents==2023.11.29 # homeassistant.components.home_connect homeconnect==0.7.2 @@ -820,7 +822,7 @@ homepluscontrol==0.0.5 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.6.11 +huawei-lte-api==1.7.3 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -839,7 +841,7 @@ ical==6.1.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.3 +idasen-ha==2.4 # homeassistant.components.network ifaddr==0.2.0 @@ -886,9 +888,6 @@ kegtron-ble==0.4.0 # homeassistant.components.knx knx-frontend==2023.6.23.191712 -# homeassistant.components.komfovent -komfovent-api==0.0.3 - # homeassistant.components.konnected konnected==1.2.0 @@ -919,6 +918,9 @@ libsoundtouch==0.8 # homeassistant.components.life360 life360==6.0.0 +# homeassistant.components.linear_garage_door +linear-garage-door==0.2.7 + # homeassistant.components.logi_circle logi-circle==0.2.3 @@ -971,7 +973,7 @@ micloud==0.5 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.6 +millheater==0.11.7 # homeassistant.components.minio minio==7.1.12 @@ -983,7 +985,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.4.1 +mopeka-iot-ble==0.5.0 # homeassistant.components.motion_blinds motionblinds==0.6.18 @@ -1003,6 +1005,9 @@ mutagen==1.47.0 # homeassistant.components.mutesync mutesync==0.0.1 +# homeassistant.components.permobil +mypermobil==0.1.6 + # homeassistant.components.keenetic_ndms2 ndms2-client==0.1.2 @@ -1013,7 +1018,7 @@ nessclient==1.0.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.1 +nettigo-air-monitor==2.2.2 # homeassistant.components.nexia nexia==2.0.7 @@ -1025,10 +1030,10 @@ nextcloudmonitor==1.4.0 nextcord==2.0.0a8 # homeassistant.components.nextdns -nextdns==2.0.1 
+nextdns==2.1.0 # homeassistant.components.nibe_heatpump -nibe==2.5.0 +nibe==2.5.2 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1060,7 +1065,7 @@ oauth2client==4.1.3 objgraph==3.5.0 # homeassistant.components.garages_amsterdam -odp-amsterdam==5.3.1 +odp-amsterdam==6.0.0 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1092,11 +1097,14 @@ opower==0.0.39 # homeassistant.components.oralb oralb-ble==0.17.6 +# homeassistant.components.ourgroceries +ourgroceries==1.5.4 + # homeassistant.components.ovo_energy ovoenergy==1.2.0 # homeassistant.components.p1_monitor -p1monitor==2.1.1 +p1monitor==3.0.0 # homeassistant.components.mqtt paho-mqtt==1.6.1 @@ -1132,7 +1140,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.33.2 +plugwise==0.34.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1163,7 +1171,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.1.0 +pvo==2.1.1 # homeassistant.components.canary py-canary==0.5.3 @@ -1226,6 +1234,9 @@ pyairnow==1.2.1 # homeassistant.components.airvisual_pro pyairvisual==2023.08.1 +# homeassistant.components.asuswrt +pyasuswrt==0.1.20 + # homeassistant.components.atag pyatag==0.3.5.3 @@ -1266,7 +1277,7 @@ pycsspeechtts==1.0.8 pydaikin==2.11.1 # homeassistant.components.deconz -pydeconz==113 +pydeconz==114 # homeassistant.components.dexcom pydexcom==0.2.3 @@ -1290,7 +1301,7 @@ pyeconet==0.1.22 pyefergy==22.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.14.2 +pyenphase==1.14.3 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1347,7 +1358,7 @@ pyialarm==2.2.0 pyicloud==1.0.0 # homeassistant.components.insteon -pyinsteon==1.5.1 +pyinsteon==1.5.2 # homeassistant.components.ipma pyipma==3.0.7 @@ -1380,7 +1391,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.0.0 +pykoplenti==1.2.2 # homeassistant.components.kraken pykrakenapi==0.1.8 @@ 
-1443,7 +1454,7 @@ pynuki==1.6.2 pynut2==2.1.2 # homeassistant.components.nws -pynws==1.5.1 +pynws==1.6.0 # homeassistant.components.nx584 pynx584==0.5 @@ -1472,7 +1483,7 @@ pyotgw==2.1.3 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.2 +pyoverkiz==1.13.3 # homeassistant.components.openweathermap pyowm==3.2.0 @@ -1508,7 +1519,7 @@ pyps4-2ndscreen==1.3.1 pyqwikswitch==0.93 # homeassistant.components.rainbird -pyrainbird==4.0.0 +pyrainbird==4.0.1 # homeassistant.components.risco pyrisco==0.5.8 @@ -1603,7 +1614,7 @@ python-ecobee-api==0.2.17 python-fullykiosk==0.0.12 # homeassistant.components.homewizard -python-homewizard-energy==3.1.0 +python-homewizard-energy==4.1.0 # homeassistant.components.izone python-izone==1.2.9 @@ -1615,7 +1626,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.5.4 # homeassistant.components.matter -python-matter-server==4.0.0 +python-matter-server==5.0.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -1637,10 +1648,10 @@ python-picnic-api==1.1.0 python-qbittorrent==0.4.3 # homeassistant.components.roborock -python-roborock==0.36.1 +python-roborock==0.36.2 # homeassistant.components.smarttub -python-smarttub==0.0.35 +python-smarttub==0.0.36 # homeassistant.components.songpal python-songpal==0.16 @@ -1667,7 +1678,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.8 +pytrafikverket==0.3.9.1 # homeassistant.components.v2c pytrydan==0.4.0 @@ -1745,13 +1756,13 @@ renault-api==0.2.0 renson-endura-delta==1.6.0 # homeassistant.components.reolink -reolink-aio==0.7.15 +reolink-aio==0.8.1 # homeassistant.components.rflink rflink==0.0.65 # homeassistant.components.ring -ring-doorbell==0.7.3 +ring-doorbell[listen]==0.8.3 # homeassistant.components.roku rokuecp==0.18.1 @@ -1803,10 +1814,10 @@ sensorpro-ble==0.5.3 sensorpush-ble==1.5.5 # homeassistant.components.sentry 
-sentry-sdk==1.34.0 +sentry-sdk==1.37.1 # homeassistant.components.sfr_box -sfrbox-api==0.0.6 +sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 @@ -1899,7 +1910,7 @@ surepy==0.8.0 switchbot-api==1.2.1 # homeassistant.components.system_bridge -systembridgeconnector==3.9.5 +systembridgeconnector==3.10.0 # homeassistant.components.tailscale tailscale==0.6.0 @@ -1953,7 +1964,7 @@ ttls==1.5.1 tuya-iot-py-sdk==0.6.6 # homeassistant.components.twentemilieu -twentemilieu==2.0.0 +twentemilieu==2.0.1 # homeassistant.components.twilio twilio==6.32.0 @@ -1971,7 +1982,7 @@ ultraheat-api==0.5.7 unifi-discovery==1.1.7 # homeassistant.components.zha -universal-silabs-flasher==0.0.14 +universal-silabs-flasher==0.0.15 # homeassistant.components.upb upb-lib==0.5.4 @@ -1987,11 +1998,14 @@ url-normalize==1.4.3 # homeassistant.components.uvc uvcclient==0.11.0 +# homeassistant.components.roborock +vacuum-map-parser-roborock==0.1.1 + # homeassistant.components.vallox vallox-websocket-api==4.0.2 # homeassistant.components.rdw -vehicle==2.2.0 +vehicle==2.2.1 # homeassistant.components.velbus velbus-aio==2023.11.0 @@ -2079,7 +2093,7 @@ yalexs-ble==2.3.2 yalexs==1.10.0 # homeassistant.components.yeelight -yeelight==0.7.13 +yeelight==0.7.14 # homeassistant.components.yolink yolink-api==0.3.1 @@ -2091,37 +2105,37 @@ youless-api==1.0.1 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2023.10.13 +yt-dlp==2023.11.16 # homeassistant.components.zamg -zamg==0.3.0 +zamg==0.3.3 # homeassistant.components.zeroconf -zeroconf==0.125.0 +zeroconf==0.127.0 # homeassistant.components.zeversolar zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.106 +zha-quirks==0.0.107 # homeassistant.components.zha -zigpy-deconz==0.21.1 +zigpy-deconz==0.22.0 # homeassistant.components.zha -zigpy-xbee==0.19.0 +zigpy-xbee==0.20.0 # homeassistant.components.zha -zigpy-zigate==0.11.0 +zigpy-zigate==0.12.0 # homeassistant.components.zha -zigpy-znp==0.11.6 +zigpy-znp==0.12.0 # 
homeassistant.components.zha -zigpy==0.59.0 +zigpy==0.60.0 # homeassistant.components.zwave_js -zwave-js-server-python==0.53.1 +zwave-js-server-python==0.54.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 03c46de6b37..c797db4b7a3 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,6 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit -black==23.11.0 codespell==2.2.2 -ruff==0.1.1 +ruff==0.1.6 yamllint==1.32.0 diff --git a/script/check_format b/script/check_format index bed35ec63e4..09dbb0abe86 100755 --- a/script/check_format +++ b/script/check_format @@ -1,10 +1,10 @@ #!/bin/sh -# Format code with black. +# Format code with ruff-format. cd "$(dirname "$0")/.." -black \ +ruff \ + format \ --check \ - --fast \ --quiet \ homeassistant tests script *.py diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index cb202ed0466..f6835fdbaf1 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -32,7 +32,6 @@ COMMENT_REQUIREMENTS = ( "pybluez", "pycocotools", "pycups", - "python-eq3bt", "python-gammu", "python-lirc", "pyuserinput", @@ -150,7 +149,7 @@ pyOpenSSL>=23.1.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.0 +protobuf==4.25.1 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -182,12 +181,17 @@ get-mac==1000000000.0.0 charset-normalizer==3.2.0 """ +GENERATED_MESSAGE = ( + f"# Automatically generated by {Path(__file__).name}, do not edit\n\n" +) + IGNORE_PRE_COMMIT_HOOK_ID = ( "check-executables-have-shebangs", "check-json", "no-commit-to-branch", "prettier", "python-typing-update", + "ruff-format", # it's just ruff ) PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$") @@ -354,6 +358,7 
@@ def generate_requirements_list(reqs: dict[str, list[str]]) -> str: def requirements_output() -> str: """Generate output for requirements.""" output = [ + GENERATED_MESSAGE, "-c homeassistant/package_constraints.txt\n", "\n", "# Home Assistant Core\n", @@ -368,6 +373,7 @@ def requirements_all_output(reqs: dict[str, list[str]]) -> str: """Generate output for requirements_all.""" output = [ "# Home Assistant Core, full dependency set\n", + GENERATED_MESSAGE, "-r requirements.txt\n", ] output.append(generate_requirements_list(reqs)) @@ -379,8 +385,7 @@ def requirements_test_all_output(reqs: dict[str, list[str]]) -> str: """Generate output for test_requirements.""" output = [ "# Home Assistant tests, full dependency set\n", - f"# Automatically generated by {Path(__file__).name}, do not edit\n", - "\n", + GENERATED_MESSAGE, "-r requirements_test.txt\n", ] @@ -389,7 +394,8 @@ def requirements_test_all_output(reqs: dict[str, list[str]]) -> str: for requirement, modules in reqs.items() if any( # Always install requirements that are not part of integrations - not mdl.startswith("homeassistant.components.") or + not mdl.startswith("homeassistant.components.") + or # Install tests for integrations that have tests has_tests(mdl) for mdl in modules @@ -425,7 +431,8 @@ def requirements_pre_commit_output() -> str: def gather_constraints() -> str: """Construct output for constraint file.""" return ( - "\n".join( + GENERATED_MESSAGE + + "\n".join( sorted( { *core_requirements(), diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index 32803731ecd..c454c69d141 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -16,6 +16,7 @@ from . 
import ( coverage, dependencies, dhcp, + docker, json, manifest, metadata, @@ -50,6 +51,7 @@ INTEGRATION_PLUGINS = [ ] HASS_PLUGINS = [ coverage, + docker, mypy_config, metadata, ] diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py new file mode 100644 index 00000000000..3bd44736038 --- /dev/null +++ b/script/hassfest/docker.py @@ -0,0 +1,89 @@ +"""Generate and validate the dockerfile.""" +from homeassistant import core +from homeassistant.util import executor, thread + +from .model import Config, Integration + +DOCKERFILE_TEMPLATE = r"""# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +ARG BUILD_FROM +FROM ${{BUILD_FROM}} + +# Synchronize with homeassistant/core.py:async_stop +ENV \ + S6_SERVICES_GRACETIME={timeout} + +ARG QEMU_CPU + +WORKDIR /usr/src + +## Setup Home Assistant Core dependencies +COPY requirements.txt homeassistant/ +COPY homeassistant/package_constraints.txt homeassistant/homeassistant/ +RUN \ + pip3 install \ + --only-binary=:all: \ + -r homeassistant/requirements.txt + +COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/ +RUN \ + if ls homeassistant/home_assistant_frontend*.whl 1> /dev/null 2>&1; then \ + pip3 install homeassistant/home_assistant_frontend-*.whl; \ + fi \ + && if ls homeassistant/home_assistant_intents*.whl 1> /dev/null 2>&1; then \ + pip3 install homeassistant/home_assistant_intents-*.whl; \ + fi \ + && \ + LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \ + MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \ + pip3 install \ + --only-binary=:all: \ + -r homeassistant/requirements_all.txt + +## Setup Home Assistant Core +COPY . 
homeassistant/ +RUN \ + pip3 install \ + --only-binary=:all: \ + -e ./homeassistant \ + && python3 -m compileall \ + homeassistant/homeassistant + +# Home Assistant S6-Overlay +COPY rootfs / + +WORKDIR /config +""" + + +def _generate_dockerfile() -> str: + timeout = ( + core.STAGE_1_SHUTDOWN_TIMEOUT + + core.STAGE_2_SHUTDOWN_TIMEOUT + + core.STAGE_3_SHUTDOWN_TIMEOUT + + executor.EXECUTOR_SHUTDOWN_TIMEOUT + + thread.THREADING_SHUTDOWN_TIMEOUT + + 10 + ) + return DOCKERFILE_TEMPLATE.format(timeout=timeout * 1000) + + +def validate(integrations: dict[str, Integration], config: Config) -> None: + """Validate dockerfile.""" + dockerfile_content = _generate_dockerfile() + config.cache["dockerfile"] = dockerfile_content + + dockerfile_path = config.root / "Dockerfile" + if dockerfile_path.read_text() != dockerfile_content: + config.add_error( + "docker", + "File Dockerfile is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) + + +def generate(integrations: dict[str, Integration], config: Config) -> None: + """Generate dockerfile.""" + dockerfile_path = config.root / "Dockerfile" + dockerfile_path.write_text(config.cache["dockerfile"]) diff --git a/script/hassfest/serializer.py b/script/hassfest/serializer.py index 499ee9d51d9..b56306a8d7e 100644 --- a/script/hassfest/serializer.py +++ b/script/hassfest/serializer.py @@ -2,11 +2,10 @@ from __future__ import annotations from collections.abc import Collection, Iterable, Mapping +import shutil +import subprocess from typing import Any -import black -from black.mode import Mode - DEFAULT_GENERATOR = "script.hassfest" @@ -72,7 +71,14 @@ To update, run python3 -m {generator} {content} """ - return black.format_str(content.strip(), mode=Mode()) + ruff = shutil.which("ruff") + if not ruff: + raise RuntimeError("ruff not found") + return subprocess.check_output( + [ruff, "format", "-"], + input=content.strip(), + encoding="utf-8", + ) def format_python_namespace( diff --git a/script/hassfest/translations.py 
b/script/hassfest/translations.py index 950eeb827ba..fa2956dd47d 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -215,6 +215,29 @@ def gen_data_entry_schema( return vol.All(*validators) +def gen_issues_schema(config: Config, integration: Integration) -> dict[str, Any]: + """Generate the issues schema.""" + return { + str: vol.All( + cv.has_at_least_one_key("description", "fix_flow"), + vol.Schema( + { + vol.Required("title"): translation_value_validator, + vol.Exclusive( + "description", "fixable" + ): translation_value_validator, + vol.Exclusive("fix_flow", "fixable"): gen_data_entry_schema( + config=config, + integration=integration, + flow_title=UNDEFINED, + require_step_title=False, + ), + }, + ), + ) + } + + def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: """Generate a strings schema.""" return vol.Schema( @@ -266,25 +289,7 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: vol.Optional("application_credentials"): { vol.Optional("description"): translation_value_validator, }, - vol.Optional("issues"): { - str: vol.All( - cv.has_at_least_one_key("description", "fix_flow"), - vol.Schema( - { - vol.Required("title"): translation_value_validator, - vol.Exclusive( - "description", "fixable" - ): translation_value_validator, - vol.Exclusive("fix_flow", "fixable"): gen_data_entry_schema( - config=config, - integration=integration, - flow_title=UNDEFINED, - require_step_title=False, - ), - }, - ), - ) - }, + vol.Optional("issues"): gen_issues_schema(config, integration), vol.Optional("entity_component"): cv.schema_with_slug_keys( { vol.Optional("name"): str, @@ -362,7 +367,8 @@ def gen_auth_schema(config: Config, integration: Integration) -> vol.Schema: flow_title=REQUIRED, require_step_title=True, ) - } + }, + vol.Optional("issues"): gen_issues_schema(config, integration), } ) diff --git a/script/lint_and_test.py b/script/lint_and_test.py index 
ee28d4765d6..48809ae4dcd 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -224,6 +224,7 @@ async def main(): code, _ = await async_exec( "python3", + "-b", "-m", "pytest", "-vv", diff --git a/script/scaffold/__main__.py b/script/scaffold/__main__.py index 8dafd8fa802..ddbd1189e11 100644 --- a/script/scaffold/__main__.py +++ b/script/scaffold/__main__.py @@ -103,10 +103,11 @@ def main(): if args.develop: print("Running tests") - print(f"$ python3 -m pytest -vvv tests/components/{info.domain}") + print(f"$ python3 -b -m pytest -vvv tests/components/{info.domain}") subprocess.run( [ "python3", + "-b", "-m", "pytest", "-vvv", diff --git a/tests/auth/providers/test_command_line.py b/tests/auth/providers/test_command_line.py index 97f8f659397..a92d41a8c5f 100644 --- a/tests/auth/providers/test_command_line.py +++ b/tests/auth/providers/test_command_line.py @@ -50,6 +50,9 @@ async def test_create_new_credential(manager, provider) -> None: user = await manager.async_get_or_create_user(credentials) assert user.is_active + assert len(user.groups) == 1 + assert user.groups[0].id == "system-admin" + assert not user.local_only async def test_match_existing_credentials(store, provider) -> None: @@ -100,6 +103,9 @@ async def test_good_auth_with_meta(manager, provider) -> None: user = await manager.async_get_or_create_user(credentials) assert user.name == "Bob" assert user.is_active + assert len(user.groups) == 1 + assert user.groups[0].id == "system-users" + assert user.local_only async def test_utf_8_username_password(provider) -> None: diff --git a/tests/auth/providers/test_command_line_cmd.sh b/tests/auth/providers/test_command_line_cmd.sh index 0e689e338f1..4cbd7946a29 100755 --- a/tests/auth/providers/test_command_line_cmd.sh +++ b/tests/auth/providers/test_command_line_cmd.sh @@ -4,6 +4,8 @@ if [ "$username" = "good-user" ] && [ "$password" = "good-pass" ]; then echo "Auth should succeed." 
>&2 if [ "$1" = "--with-meta" ]; then echo "name=Bob" + echo "group=system-users" + echo "local_only=true" fi exit 0 fi diff --git a/tests/auth/providers/test_legacy_api_password.py b/tests/auth/providers/test_legacy_api_password.py index 7c2335f7ccc..3d89c577ebf 100644 --- a/tests/auth/providers/test_legacy_api_password.py +++ b/tests/auth/providers/test_legacy_api_password.py @@ -5,6 +5,12 @@ from homeassistant import auth, data_entry_flow from homeassistant.auth import auth_store from homeassistant.auth.providers import legacy_api_password from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import ensure_auth_manager_loaded + +CONFIG = {"type": "legacy_api_password", "api_password": "test-password"} @pytest.fixture @@ -16,9 +22,7 @@ def store(hass): @pytest.fixture def provider(hass, store): """Mock provider.""" - return legacy_api_password.LegacyApiPasswordAuthProvider( - hass, store, {"type": "legacy_api_password", "api_password": "test-password"} - ) + return legacy_api_password.LegacyApiPasswordAuthProvider(hass, store, CONFIG) @pytest.fixture @@ -68,3 +72,15 @@ async def test_login_flow_works(hass: HomeAssistant, manager) -> None: flow_id=result["flow_id"], user_input={"password": "test-password"} ) assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + + +async def test_create_repair_issue(hass: HomeAssistant): + """Test legacy api password auth provider creates a reapir issue.""" + hass.auth = await auth.auth_manager_from_config(hass, [CONFIG], []) + ensure_auth_manager_loaded(hass.auth) + await async_setup_component(hass, "auth", {}) + issue_registry: ir.IssueRegistry = ir.async_get(hass) + + assert issue_registry.async_get_issue( + domain="auth", issue_id="deprecated_legacy_api_password" + ) diff --git a/tests/common.py b/tests/common.py index 1737eae21e6..b2fa53d28fb 100644 --- a/tests/common.py +++ b/tests/common.py 
@@ -267,7 +267,7 @@ async def async_test_home_assistant(event_loop, load_registries=True): "homeassistant.helpers.restore_state.RestoreStateData.async_setup_dump", return_value=None, ), patch( - "homeassistant.helpers.restore_state.start.async_at_start" + "homeassistant.helpers.restore_state.start.async_at_start", ): await asyncio.gather( ar.async_load(hass), @@ -297,6 +297,7 @@ def async_mock_service( schema: vol.Schema | None = None, response: ServiceResponse = None, supports_response: SupportsResponse | None = None, + raise_exception: Exception | None = None, ) -> list[ServiceCall]: """Set up a fake service & return a calls log list to this service.""" calls = [] @@ -305,6 +306,8 @@ def async_mock_service( def mock_service_log(call): # pylint: disable=unnecessary-lambda """Mock service call.""" calls.append(call) + if raise_exception is not None: + raise raise_exception return response if supports_response is None: @@ -984,7 +987,10 @@ def assert_setup_component(count, domain=None): async def mock_psc(hass, config_input, integration): """Mock the prepare_setup_component to capture config.""" domain_input = integration.domain - res = await async_process_component_config(hass, config_input, integration) + integration_config_info = await async_process_component_config( + hass, config_input, integration + ) + res = integration_config_info.config config[domain_input] = None if res is None else res.get(domain_input) _LOGGER.debug( "Configuration for %s, Validated: %s, Original %s", @@ -992,7 +998,7 @@ def assert_setup_component(count, domain=None): config[domain_input], config_input.get(domain_input), ) - return res + return integration_config_info assert isinstance(config, dict) with patch("homeassistant.config.async_process_component_config", mock_psc): @@ -1301,11 +1307,12 @@ async def get_system_health_info(hass: HomeAssistant, domain: str) -> dict[str, @contextmanager def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> None: """Mock a config flow 
handler.""" - assert domain not in config_entries.HANDLERS + handler = config_entries.HANDLERS.get(domain) config_entries.HANDLERS[domain] = config_flow _LOGGER.info("Adding mock config flow: %s", domain) yield - config_entries.HANDLERS.pop(domain) + if handler: + config_entries.HANDLERS[domain] = handler def mock_integration( @@ -1339,18 +1346,6 @@ def mock_integration( return integration -def mock_entity_platform( - hass: HomeAssistant, platform_path: str, module: MockPlatform | None -) -> None: - """Mock a entity platform. - - platform_path is in form light.hue. Will create platform - hue.light. - """ - domain, platform_name = platform_path.split(".") - mock_platform(hass, f"{platform_name}.{domain}", module) - - def mock_platform( hass: HomeAssistant, platform_path: str, module: Mock | MockPlatform | None = None ) -> None: diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 521393af71b..081e7bf595a 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -75,6 +75,238 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 58, + 'condition': 'lightning-rainy', + 'datetime': '2020-07-26T05:00:00+00:00', + 'precipitation': 2.5, + 'precipitation_probability': 60, + 'temperature': 29.5, + 'templow': 15.4, + 'uv_index': 5, + 'wind_bearing': 166, + 'wind_gust_speed': 29.6, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 52, + 'condition': 'partlycloudy', + 'datetime': '2020-07-27T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 26.2, + 'templow': 15.9, + 'uv_index': 7, + 'wind_bearing': 297, + 'wind_gust_speed': 14.8, + 'wind_speed': 9.3, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 65, + 
'condition': 'partlycloudy', + 'datetime': '2020-07-28T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 31.7, + 'templow': 16.8, + 'uv_index': 7, + 'wind_bearing': 198, + 'wind_gust_speed': 24.1, + 'wind_speed': 16.7, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 45, + 'condition': 'partlycloudy', + 'datetime': '2020-07-29T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 9, + 'temperature': 24.0, + 'templow': 11.7, + 'uv_index': 6, + 'wind_bearing': 293, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 22.2, + 'cloud_coverage': 50, + 'condition': 'partlycloudy', + 'datetime': '2020-07-30T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 1, + 'temperature': 21.4, + 'templow': 12.2, + 'uv_index': 7, + 'wind_bearing': 280, + 'wind_gust_speed': 27.8, + 'wind_speed': 18.5, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 58, + 'condition': 'lightning-rainy', + 'datetime': '2020-07-26T05:00:00+00:00', + 'precipitation': 2.5, + 'precipitation_probability': 60, + 'temperature': 29.5, + 'templow': 15.4, + 'uv_index': 5, + 'wind_bearing': 166, + 'wind_gust_speed': 29.6, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 52, + 'condition': 'partlycloudy', + 'datetime': '2020-07-27T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 26.2, + 'templow': 15.9, + 'uv_index': 7, + 'wind_bearing': 297, + 'wind_gust_speed': 14.8, + 'wind_speed': 9.3, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 65, + 'condition': 'partlycloudy', + 'datetime': '2020-07-28T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 31.7, + 'templow': 16.8, + 'uv_index': 7, + 'wind_bearing': 198, + 
'wind_gust_speed': 24.1, + 'wind_speed': 16.7, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 45, + 'condition': 'partlycloudy', + 'datetime': '2020-07-29T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 9, + 'temperature': 24.0, + 'templow': 11.7, + 'uv_index': 6, + 'wind_bearing': 293, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 22.2, + 'cloud_coverage': 50, + 'condition': 'partlycloudy', + 'datetime': '2020-07-30T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 1, + 'temperature': 21.4, + 'templow': 12.2, + 'uv_index': 7, + 'wind_bearing': 280, + 'wind_gust_speed': 27.8, + 'wind_speed': 18.5, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 58, + 'condition': 'lightning-rainy', + 'datetime': '2020-07-26T05:00:00+00:00', + 'precipitation': 2.5, + 'precipitation_probability': 60, + 'temperature': 29.5, + 'templow': 15.4, + 'uv_index': 5, + 'wind_bearing': 166, + 'wind_gust_speed': 29.6, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 52, + 'condition': 'partlycloudy', + 'datetime': '2020-07-27T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 26.2, + 'templow': 15.9, + 'uv_index': 7, + 'wind_bearing': 297, + 'wind_gust_speed': 14.8, + 'wind_speed': 9.3, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 65, + 'condition': 'partlycloudy', + 'datetime': '2020-07-28T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 31.7, + 'templow': 16.8, + 'uv_index': 7, + 'wind_bearing': 198, + 'wind_gust_speed': 24.1, + 'wind_speed': 16.7, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 45, + 'condition': 'partlycloudy', + 'datetime': '2020-07-29T05:00:00+00:00', + 
'precipitation': 0.0, + 'precipitation_probability': 9, + 'temperature': 24.0, + 'templow': 11.7, + 'uv_index': 6, + 'wind_bearing': 293, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.0, + }), + dict({ + 'apparent_temperature': 22.2, + 'cloud_coverage': 50, + 'condition': 'partlycloudy', + 'datetime': '2020-07-30T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 1, + 'temperature': 21.4, + 'templow': 12.2, + 'uv_index': 7, + 'wind_bearing': 280, + 'wind_gust_speed': 27.8, + 'wind_speed': 18.5, + }), + ]), + }), + }) +# --- # name: test_forecast_subscription list([ dict({ diff --git a/tests/components/accuweather/test_weather.py b/tests/components/accuweather/test_weather.py index 5a35f2798d8..920e5cf82b9 100644 --- a/tests/components/accuweather/test_weather.py +++ b/tests/components/accuweather/test_weather.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import PropertyMock, patch from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.accuweather.const import ATTRIBUTION @@ -31,7 +32,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, WeatherEntityFeature, ) from homeassistant.const import ( @@ -206,16 +208,24 @@ async def test_unsupported_condition_icon_data(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_FORECAST_CONDITION) is None +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test multiple forecast.""" await init_integration(hass, forecast=True) response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.home", "type": "daily", @@ 
-223,7 +233,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot diff --git a/tests/components/aemet/snapshots/test_weather.ambr b/tests/components/aemet/snapshots/test_weather.ambr index 08cc379267d..9a7b79d94ea 100644 --- a/tests/components/aemet/snapshots/test_weather.ambr +++ b/tests/components/aemet/snapshots/test_weather.ambr @@ -490,6 +490,1454 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.aemet': dict({ + 'forecast': list([ + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation_probability': 30, + 'temperature': 4.0, + 'templow': -4.0, + 'wind_bearing': 45.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 3.0, + 'templow': -7.0, + 'wind_bearing': 0.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-12T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -1.0, + 'templow': -13.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-01-13T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 6.0, + 'templow': -11.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-14T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 6.0, + 'templow': -7.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-15T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 5.0, + 'templow': -4.0, + 'wind_bearing': None, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.aemet': dict({ + 'forecast': list([ + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T12:00:00+00:00', + 'precipitation': 3.6, + 'precipitation_probability': 100, + 
'temperature': 0.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T13:00:00+00:00', + 'precipitation': 2.7, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T14:00:00+00:00', + 'precipitation': 0.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 14.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T15:00:00+00:00', + 'precipitation': 0.8, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 20.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T16:00:00+00:00', + 'precipitation': 1.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 14.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T17:00:00+00:00', + 'precipitation': 1.2, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T18:00:00+00:00', + 'precipitation': 0.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T19:00:00+00:00', + 'precipitation': 0.3, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T20:00:00+00:00', + 'precipitation': 0.1, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + 
dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T21:00:00+00:00', + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 8.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T22:00:00+00:00', + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-09T23:00:00+00:00', + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T01:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 10.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T02:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T03:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T04:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T05:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T06:00:00+00:00', + 
'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T07:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 18.0, + 'wind_speed': 13.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T08:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T09:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 31.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T10:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T11:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T12:00:00+00:00', + 'precipitation_probability': 15, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 22.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T13:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T14:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 4.0, + 
'wind_bearing': 45.0, + 'wind_gust_speed': 28.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T16:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T17:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T18:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T19:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T20:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T21:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T22:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T23:00:00+00:00', + 'precipitation_probability': None, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 
'wind_speed': 19.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T01:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T02:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 12.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T03:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 17.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T04:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 11.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T05:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T06:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -4.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation_probability': 30, + 'temperature': 4.0, + 'templow': -4.0, + 'wind_bearing': 45.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 3.0, + 'templow': -7.0, + 'wind_bearing': 0.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-12T00:00:00+00:00', + 'precipitation_probability': 0, + 
'temperature': -1.0, + 'templow': -13.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-01-13T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 6.0, + 'templow': -11.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-14T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 6.0, + 'templow': -7.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-15T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 5.0, + 'templow': -4.0, + 'wind_bearing': None, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T12:00:00+00:00', + 'precipitation': 3.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T13:00:00+00:00', + 'precipitation': 2.7, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T14:00:00+00:00', + 'precipitation': 0.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 14.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T15:00:00+00:00', + 'precipitation': 0.8, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 20.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T16:00:00+00:00', + 'precipitation': 1.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 14.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T17:00:00+00:00', + 
'precipitation': 1.2, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T18:00:00+00:00', + 'precipitation': 0.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T19:00:00+00:00', + 'precipitation': 0.3, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T20:00:00+00:00', + 'precipitation': 0.1, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 8.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-09T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 
45.0, + 'wind_gust_speed': 10.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T04:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T06:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T07:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 18.0, + 'wind_speed': 13.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T08:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T09:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 31.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': 
'2021-01-10T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T12:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 22.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T13:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T14:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T15:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 28.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T16:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T17:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T18:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 
'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T19:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T20:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 
12.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 17.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T04:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 11.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T06:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -4.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.aemet': dict({ + 'forecast': list([ + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation_probability': 30, + 'temperature': 4.0, + 'templow': -4.0, + 'wind_bearing': 45.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 3.0, + 'templow': -7.0, + 'wind_bearing': 0.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-12T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': -1.0, + 'templow': -13.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-01-13T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 6.0, + 'templow': -11.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-14T00:00:00+00:00', + 'precipitation_probability': 0, + 
'temperature': 6.0, + 'templow': -7.0, + 'wind_bearing': None, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-15T00:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 5.0, + 'templow': -4.0, + 'wind_bearing': None, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].1 + dict({ + 'weather.aemet': dict({ + 'forecast': list([ + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T12:00:00+00:00', + 'precipitation': 3.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T13:00:00+00:00', + 'precipitation': 2.7, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 15.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T14:00:00+00:00', + 'precipitation': 0.6, + 'precipitation_probability': 100, + 'temperature': 0.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 14.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T15:00:00+00:00', + 'precipitation': 0.8, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 20.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T16:00:00+00:00', + 'precipitation': 1.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 14.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T17:00:00+00:00', + 'precipitation': 1.2, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-01-09T18:00:00+00:00', + 'precipitation': 0.4, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 
'wind_gust_speed': 13.0, + 'wind_speed': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T19:00:00+00:00', + 'precipitation': 0.3, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-01-09T20:00:00+00:00', + 'precipitation': 0.1, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 135.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 8.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-09T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-09T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 100, + 'temperature': 1.0, + 'wind_bearing': 90.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 10.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'fog', + 'datetime': '2021-01-10T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'fog', + 'datetime': 
'2021-01-10T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 0.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 9.0, + 'wind_speed': 6.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T04:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 12.0, + 'wind_speed': 8.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 11.0, + 'wind_speed': 5.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T06:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 13.0, + 'wind_speed': 9.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T07:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 18.0, + 'wind_speed': 13.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T08:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T09:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 31.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 15, + 
'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T12:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 22.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T13:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 32.0, + 'wind_speed': 20.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T14:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T15:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 28.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T16:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T17:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 5, + 'temperature': 2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T18:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-10T19:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 17.0, + }), 
+ dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T20:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 25.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T21:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 24.0, + 'wind_speed': 17.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T22:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-10T23:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 0.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 21.0, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-01-11T00:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 30.0, + 'wind_speed': 19.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-01-11T01:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -1.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 27.0, + 'wind_speed': 16.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T02:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 22.0, + 'wind_speed': 12.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T03:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -2.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 17.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T04:00:00+00:00', + 
'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -3.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 11.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T05:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': -4.0, + 'wind_bearing': 45.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-01-11T06:00:00+00:00', + 'precipitation_probability': None, + 'temperature': -4.0, + 'wind_bearing': 0.0, + 'wind_gust_speed': 15.0, + 'wind_speed': 10.0, + }), + ]), + }), + }) +# --- # name: test_forecast_subscription[daily] list([ dict({ diff --git a/tests/components/aemet/test_weather.py b/tests/components/aemet/test_weather.py index f7ab39b9a71..695087bb738 100644 --- a/tests/components/aemet/test_weather.py +++ b/tests/components/aemet/test_weather.py @@ -29,7 +29,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant @@ -122,10 +123,18 @@ async def test_aemet_weather_legacy( assert state is None +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test multiple forecast.""" @@ -135,7 +144,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.aemet", "type": "daily", @@ -147,7 +156,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.aemet", "type": "hourly", 
diff --git a/tests/components/airq/test_config_flow.py b/tests/components/airq/test_config_flow.py index 252c12f80fa..52fc8d2300b 100644 --- a/tests/components/airq/test_config_flow.py +++ b/tests/components/airq/test_config_flow.py @@ -1,7 +1,7 @@ """Test the air-Q config flow.""" from unittest.mock import patch -from aioairq.core import DeviceInfo, InvalidAuth, InvalidInput +from aioairq import DeviceInfo, InvalidAuth, InvalidInput from aiohttp.client_exceptions import ClientConnectionError import pytest diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index 4376db23366..9ebe13c83e6 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -78,9 +78,7 @@ async def setup_airvisual_pro_fixture(hass, config, pro): "homeassistant.components.airvisual_pro.config_flow.NodeSamba", return_value=pro ), patch( "homeassistant.components.airvisual_pro.NodeSamba", return_value=pro - ), patch( - "homeassistant.components.airvisual.PLATFORMS", [] - ): + ), patch("homeassistant.components.airvisual.PLATFORMS", []): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() yield diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index a6be57e9ed5..11e39c40cb1 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -183,7 +183,7 @@ async def test_api_increase_color_temp( ("domain", "payload", "source_list", "idx"), [ ("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1), - ("media_player", "SATELLITE TV", ["satellite-tv", "game console"], 0), + ("media_player", "SATELLITE TV", ["satellite-tv", "game console", None], 0), ("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0), ("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None), ], @@ -864,6 +864,7 @@ async def test_report_playback_state(hass: 
HomeAssistant) -> None: | MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.STOP, "volume_level": 0.75, + "source_list": [None], }, ) diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index e24ec4c950b..7a1abe96110 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -1439,6 +1439,8 @@ async def test_media_player_inputs(hass: HomeAssistant) -> None: "aux", "input 1", "tv", + 0, + None, ], }, ) diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 4e51880c754..d22738a7e6b 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -180,9 +180,11 @@ async def test_send_base_with_supervisor( "homeassistant.components.hassio.is_hassio", side_effect=Mock(return_value=True), ), patch( - "uuid.UUID.hex", new_callable=PropertyMock + "uuid.UUID.hex", + new_callable=PropertyMock, ) as hex, patch( - "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION + "homeassistant.components.analytics.analytics.HA_VERSION", + MOCK_VERSION, ): hex.return_value = MOCK_UUID await analytics.load() @@ -289,7 +291,8 @@ async def test_send_usage_with_supervisor( "homeassistant.components.hassio.is_hassio", side_effect=Mock(return_value=True), ), patch( - "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION + "homeassistant.components.analytics.analytics.HA_VERSION", + MOCK_VERSION, ): await analytics.send_analytics() assert ( @@ -492,7 +495,8 @@ async def test_send_statistics_with_supervisor( "homeassistant.components.hassio.is_hassio", side_effect=Mock(return_value=True), ), patch( - "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION + "homeassistant.components.analytics.analytics.HA_VERSION", + MOCK_VERSION, ): await analytics.send_analytics() assert "'addon_count': 1" in caplog.text diff --git 
a/tests/components/anova/__init__.py b/tests/components/anova/__init__.py index 5bcb84cb974..aa58ee5bbb5 100644 --- a/tests/components/anova/__init__.py +++ b/tests/components/anova/__init__.py @@ -51,7 +51,7 @@ async def async_init_integration( ) as update_patch, patch( "homeassistant.components.anova.AnovaApi.authenticate" ), patch( - "homeassistant.components.anova.AnovaApi.get_devices" + "homeassistant.components.anova.AnovaApi.get_devices", ) as device_patch: update_patch.return_value = ONLINE_UPDATE device_patch.return_value = [ diff --git a/tests/components/apcupsd/__init__.py b/tests/components/apcupsd/__init__.py index b8a83f950d0..b0eee051331 100644 --- a/tests/components/apcupsd/__init__.py +++ b/tests/components/apcupsd/__init__.py @@ -95,8 +95,9 @@ async def async_init_integration( entry.add_to_hass(hass) - with patch("apcaccess.status.parse", return_value=status), patch( - "apcaccess.status.get", return_value=b"" + with ( + patch("apcaccess.status.parse", return_value=status), + patch("apcaccess.status.get", return_value=b""), ): assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/apcupsd/test_config_flow.py b/tests/components/apcupsd/test_config_flow.py index 6ac7992f404..48d57890320 100644 --- a/tests/components/apcupsd/test_config_flow.py +++ b/tests/components/apcupsd/test_config_flow.py @@ -38,10 +38,10 @@ async def test_config_flow_cannot_connect(hass: HomeAssistant) -> None: async def test_config_flow_no_status(hass: HomeAssistant) -> None: """Test config flow setup with successful connection but no status is reported.""" - with patch( - "apcaccess.status.parse", - return_value={}, # Returns no status. - ), patch("apcaccess.status.get", return_value=b""): + with ( + patch("apcaccess.status.parse", return_value={}), # Returns no status. 
+ patch("apcaccess.status.get", return_value=b""), + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) @@ -63,9 +63,11 @@ async def test_config_flow_duplicate(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - with patch("apcaccess.status.parse") as mock_parse, patch( - "apcaccess.status.get", return_value=b"" - ), _patch_setup(): + with ( + patch("apcaccess.status.parse") as mock_parse, + patch("apcaccess.status.get", return_value=b""), + _patch_setup(), + ): mock_parse.return_value = MOCK_STATUS # Now, create the integration again using the same config data, we should reject @@ -109,9 +111,11 @@ async def test_config_flow_duplicate(hass: HomeAssistant) -> None: async def test_flow_works(hass: HomeAssistant) -> None: """Test successful creation of config entries via user configuration.""" - with patch("apcaccess.status.parse", return_value=MOCK_STATUS), patch( - "apcaccess.status.get", return_value=b"" - ), _patch_setup() as mock_setup: + with ( + patch("apcaccess.status.parse", return_value=MOCK_STATUS), + patch("apcaccess.status.get", return_value=b""), + _patch_setup() as mock_setup, + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -147,9 +151,11 @@ async def test_flow_minimal_status( We test different combinations of minimal statuses, where the title of the integration will vary. 
""" - with patch("apcaccess.status.parse") as mock_parse, patch( - "apcaccess.status.get", return_value=b"" - ), _patch_setup() as mock_setup: + with ( + patch("apcaccess.status.parse") as mock_parse, + patch("apcaccess.status.get", return_value=b""), + _patch_setup() as mock_setup, + ): status = MOCK_MINIMAL_STATUS | extra_status mock_parse.return_value = status diff --git a/tests/components/apcupsd/test_init.py b/tests/components/apcupsd/test_init.py index 9bdcc89a9a3..756fa07f120 100644 --- a/tests/components/apcupsd/test_init.py +++ b/tests/components/apcupsd/test_init.py @@ -4,15 +4,17 @@ from unittest.mock import patch import pytest -from homeassistant.components.apcupsd import DOMAIN +from homeassistant.components.apcupsd.const import DOMAIN +from homeassistant.components.apcupsd.coordinator import UPDATE_INTERVAL from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.util import utcnow from . import CONF_DATA, MOCK_MINIMAL_STATUS, MOCK_STATUS, async_init_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.parametrize("status", (MOCK_STATUS, MOCK_MINIMAL_STATUS)) @@ -67,11 +69,11 @@ async def test_device_entry( for field, entry_value in fields.items(): if field in status: assert entry_value == status[field] + # Even if UPSNAME is not available, we must fall back to default "APC UPS". elif field == "UPSNAME": - # Even if UPSNAME is not available, we must fall back to default "APC UPS". 
assert entry_value == "APC UPS" else: - assert entry_value is None + assert not entry_value assert entry.manufacturer == "APC" @@ -107,15 +109,16 @@ async def test_connection_error(hass: HomeAssistant) -> None: entry.add_to_hass(hass) - with patch("apcaccess.status.parse", side_effect=OSError()), patch( - "apcaccess.status.get" + with ( + patch("apcaccess.status.parse", side_effect=OSError()), + patch("apcaccess.status.get"), ): await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_RETRY -async def test_unload_remove(hass: HomeAssistant) -> None: - """Test successful unload of entry.""" +async def test_unload_remove_entry(hass: HomeAssistant) -> None: + """Test successful unload and removal of an entry.""" # Load two integrations from two mock hosts. entries = ( await async_init_integration(hass, host="test1", status=MOCK_STATUS), @@ -142,3 +145,41 @@ async def test_unload_remove(hass: HomeAssistant) -> None: await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + + +async def test_availability(hass: HomeAssistant) -> None: + """Ensure that we mark the entity's availability properly when network is down / back up.""" + await async_init_integration(hass) + + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert pytest.approx(float(state.state)) == 14.0 + + with ( + patch("apcaccess.status.parse") as mock_parse, + patch("apcaccess.status.get", return_value=b""), + ): + # Mock a network error and then trigger an auto-polling event. + mock_parse.side_effect = OSError() + future = utcnow() + UPDATE_INTERVAL + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + # Sensors should be marked as unavailable. 
+ state = hass.states.get("sensor.ups_load") + assert state + assert state.state == STATE_UNAVAILABLE + + # Reset the API to return a new status and update. + mock_parse.side_effect = None + mock_parse.return_value = MOCK_STATUS | {"LOADPCT": "15.0 Percent"} + future = future + UPDATE_INTERVAL + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + # Sensors should be online now with the new value. + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert pytest.approx(float(state.state)) == 15.0 diff --git a/tests/components/apcupsd/test_sensor.py b/tests/components/apcupsd/test_sensor.py index 743b1f87847..bff1b858216 100644 --- a/tests/components/apcupsd/test_sensor.py +++ b/tests/components/apcupsd/test_sensor.py @@ -1,5 +1,9 @@ """Test sensors of APCUPSd integration.""" +from datetime import timedelta +from unittest.mock import patch + +from homeassistant.components.apcupsd.coordinator import REQUEST_REFRESH_COOLDOWN from homeassistant.components.sensor import ( ATTR_STATE_CLASS, SensorDeviceClass, @@ -7,17 +11,23 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( ATTR_DEVICE_CLASS, + ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, + STATE_UNAVAILABLE, UnitOfElectricPotential, UnitOfPower, UnitOfTime, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from . 
import MOCK_STATUS, async_init_integration +from tests.common import async_fire_time_changed + async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test states of sensor.""" @@ -105,3 +115,97 @@ async def test_sensor_disabled( assert updated_entry != entry assert updated_entry.disabled is False + + +async def test_state_update(hass: HomeAssistant) -> None: + """Ensure the sensor state changes after updating the data.""" + await async_init_integration(hass) + + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.state == "14.0" + + new_status = MOCK_STATUS | {"LOADPCT": "15.0 Percent"} + with ( + patch("apcaccess.status.parse", return_value=new_status), + patch("apcaccess.status.get", return_value=b""), + ): + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.state == "15.0" + + +async def test_manual_update_entity(hass: HomeAssistant) -> None: + """Test manual update entity via service homeassistant/update_entity.""" + await async_init_integration(hass) + + # Assert the initial state of sensor.ups_load. + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.state == "14.0" + + # Setup HASS for calling the update_entity service. + await async_setup_component(hass, "homeassistant", {}) + + with ( + patch("apcaccess.status.parse") as mock_parse, + patch("apcaccess.status.get", return_value=b"") as mock_get, + ): + mock_parse.return_value = MOCK_STATUS | { + "LOADPCT": "15.0 Percent", + "BCHARGE": "99.0 Percent", + } + # Now, we fast-forward the time to pass the debouncer cooldown, but put it + # before the normal update interval to see if the manual update works. 
+ future = utcnow() + timedelta(seconds=REQUEST_REFRESH_COOLDOWN) + async_fire_time_changed(hass, future) + await hass.services.async_call( + "homeassistant", + "update_entity", + {ATTR_ENTITY_ID: ["sensor.ups_load", "sensor.ups_battery"]}, + blocking=True, + ) + # Even if we requested updates for two entities, our integration should smartly + # group the API calls to just one. + assert mock_parse.call_count == 1 + assert mock_get.call_count == 1 + + # The new state should be effective. + state = hass.states.get("sensor.ups_load") + assert state + assert state.state != STATE_UNAVAILABLE + assert state.state == "15.0" + + +async def test_multiple_manual_update_entity(hass: HomeAssistant) -> None: + """Test multiple simultaneous manual update entity via service homeassistant/update_entity. + + We should only do network call once for the multiple simultaneous update entity services. + """ + await async_init_integration(hass) + + # Setup HASS for calling the update_entity service. + await async_setup_component(hass, "homeassistant", {}) + + with ( + patch("apcaccess.status.parse", return_value=MOCK_STATUS) as mock_parse, + patch("apcaccess.status.get", return_value=b"") as mock_get, + ): + # Fast-forward time to just pass the initial debouncer cooldown. 
+ future = utcnow() + timedelta(seconds=REQUEST_REFRESH_COOLDOWN) + async_fire_time_changed(hass, future) + await hass.services.async_call( + "homeassistant", + "update_entity", + {ATTR_ENTITY_ID: ["sensor.ups_load", "sensor.ups_input_voltage"]}, + blocking=True, + ) + assert mock_parse.call_count == 1 + assert mock_get.call_count == 1 diff --git a/tests/components/api/test_init.py b/tests/components/api/test_init.py index f97b55c3ede..08cb77b4559 100644 --- a/tests/components/api/test_init.py +++ b/tests/components/api/test_init.py @@ -1,9 +1,10 @@ """The tests for the Home Assistant API component.""" +import asyncio from http import HTTPStatus import json from unittest.mock import patch -from aiohttp import web +from aiohttp import ServerDisconnectedError, web from aiohttp.test_utils import TestClient import pytest import voluptuous as vol @@ -352,26 +353,41 @@ async def test_api_call_service_with_data( assert state["attributes"] == {"data": 1} -async def test_api_call_service_timeout( +async def test_api_call_service_client_closed( hass: HomeAssistant, mock_api_client: TestClient ) -> None: - """Test if the API does not fail on long running services.""" + """Test that services keep running if client is closed.""" test_value = [] fut = hass.loop.create_future() + service_call_started = asyncio.Event() async def listener(service_call): """Wait and return after mock_api_client.post finishes.""" + service_call_started.set() value = await fut test_value.append(value) hass.services.async_register("test_domain", "test_service", listener) - with patch("homeassistant.components.api.SERVICE_WAIT_TIMEOUT", 0): - await mock_api_client.post("/api/services/test_domain/test_service") - assert len(test_value) == 0 - fut.set_result(1) - await hass.async_block_till_done() + api_task = hass.async_create_task( + mock_api_client.post("/api/services/test_domain/test_service") + ) + + await service_call_started.wait() + + assert len(test_value) == 0 + + await mock_api_client.close() + 
+ assert len(test_value) == 0 + assert api_task.done() + + with pytest.raises(ServerDisconnectedError): + await api_task + + fut.set_result(1) + await hass.async_block_till_done() assert len(test_value) == 1 assert test_value[0] == 1 diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index cc56894cf0d..807eff4ef8d 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -479,7 +479,7 @@ async def test_config_flow( resp = await client.cmd("delete", {"application_credentials_id": ID}) assert not resp.get("success") assert "error" in resp - assert resp["error"].get("code") == "unknown_error" + assert resp["error"].get("code") == "home_assistant_error" assert ( resp["error"].get("message") == "Cannot delete credential in use by integration fake_integration" diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 9eb7e1e5a05..072b1ff730a 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -487,6 +487,119 @@ # name: test_audio_pipeline_with_wake_word_timeout.3 None # --- +# name: test_device_capture + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': 1, + 'timeout': 300, + }), + }) +# --- +# name: test_device_capture.1 + dict({ + 'engine': 'test', + 'metadata': dict({ + 'bit_rate': 16, + 'channel': 1, + 'codec': 'pcm', + 'format': 'wav', + 'language': 'en-US', + 'sample_rate': 16000, + }), + }) +# --- +# name: test_device_capture.2 + None +# --- +# name: test_device_capture_override + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': 1, + 'timeout': 300, + }), + }) +# --- +# name: test_device_capture_override.1 + dict({ + 'engine': 'test', + 'metadata': 
dict({ + 'bit_rate': 16, + 'channel': 1, + 'codec': 'pcm', + 'format': 'wav', + 'language': 'en-US', + 'sample_rate': 16000, + }), + }) +# --- +# name: test_device_capture_override.2 + dict({ + 'audio': 'Y2h1bmsx', + 'channels': 1, + 'rate': 16000, + 'type': 'audio', + 'width': 2, + }) +# --- +# name: test_device_capture_override.3 + dict({ + 'stt_output': dict({ + 'text': 'test transcript', + }), + }) +# --- +# name: test_device_capture_override.4 + None +# --- +# name: test_device_capture_override.5 + dict({ + 'overflow': False, + 'type': 'end', + }) +# --- +# name: test_device_capture_queue_full + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': 1, + 'timeout': 300, + }), + }) +# --- +# name: test_device_capture_queue_full.1 + dict({ + 'engine': 'test', + 'metadata': dict({ + 'bit_rate': 16, + 'channel': 1, + 'codec': 'pcm', + 'format': 'wav', + 'language': 'en-US', + 'sample_rate': 16000, + }), + }) +# --- +# name: test_device_capture_queue_full.2 + dict({ + 'stt_output': dict({ + 'text': 'test transcript', + }), + }) +# --- +# name: test_device_capture_queue_full.3 + None +# --- +# name: test_device_capture_queue_full.4 + dict({ + 'overflow': True, + 'type': 'end', + }) +# --- # name: test_intent_failed dict({ 'language': 'en', @@ -537,6 +650,33 @@ 'message': 'Timeout running pipeline', }) # --- +# name: test_pipeline_empty_tts_output + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': None, + 'timeout': 300, + }), + }) +# --- +# name: test_pipeline_empty_tts_output.1 + dict({ + 'engine': 'test', + 'language': 'en-US', + 'tts_input': '', + 'voice': 'james_earl_jones', + }) +# --- +# name: test_pipeline_empty_tts_output.2 + dict({ + 'tts_output': dict({ + }), + }) +# --- +# name: test_pipeline_empty_tts_output.3 + None +# --- # name: test_stt_provider_missing dict({ 'language': 'en', diff --git a/tests/components/assist_pipeline/test_logbook.py 
b/tests/components/assist_pipeline/test_logbook.py new file mode 100644 index 00000000000..c1e0633ed57 --- /dev/null +++ b/tests/components/assist_pipeline/test_logbook.py @@ -0,0 +1,42 @@ +"""The tests for assist_pipeline logbook.""" +from homeassistant.components import assist_pipeline, logbook +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.components.logbook.common import MockRow, mock_humanify + + +async def test_recording_event( + hass: HomeAssistant, init_components, device_registry: dr.DeviceRegistry +) -> None: + """Test recording event.""" + hass.config.components.add("recorder") + assert await async_setup_component(hass, "logbook", {}) + + entry = MockConfigEntry() + entry.add_to_hass(hass) + satellite_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "satellite-1234")}, + ) + + device_registry.async_update_device(satellite_device.id, name="My Satellite") + event = mock_humanify( + hass, + [ + MockRow( + assist_pipeline.EVENT_RECORDING, + {ATTR_DEVICE_ID: satellite_device.id}, + ), + ], + )[0] + + assert event[logbook.LOGBOOK_ENTRY_NAME] == "My Satellite" + assert event[logbook.LOGBOOK_ENTRY_DOMAIN] == assist_pipeline.DOMAIN + assert ( + event[logbook.LOGBOOK_ENTRY_MESSAGE] == "My Satellite captured an audio sample" + ) diff --git a/tests/components/assist_pipeline/test_select.py b/tests/components/assist_pipeline/test_select.py index 9e70e65e0a8..c4e750e1019 100644 --- a/tests/components/assist_pipeline/test_select.py +++ b/tests/components/assist_pipeline/test_select.py @@ -20,7 +20,7 @@ from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import 
AddEntitiesCallback -from tests.common import MockConfigEntry, MockPlatform, mock_entity_platform +from tests.common import MockConfigEntry, MockPlatform, mock_platform class SelectPlatform(MockPlatform): @@ -47,7 +47,7 @@ class SelectPlatform(MockPlatform): @pytest.fixture async def init_select(hass: HomeAssistant, init_components) -> ConfigEntry: """Initialize select entity.""" - mock_entity_platform(hass, "select.assist_pipeline", SelectPlatform()) + mock_platform(hass, "assist_pipeline.select", SelectPlatform()) config_entry = MockConfigEntry(domain="assist_pipeline") config_entry.add_to_hass(hass) assert await hass.config_entries.async_forward_entry_setup(config_entry, "select") diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index 9a4e78a29af..0e2a3ad538c 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -1,16 +1,23 @@ """Websocket tests for Voice Assistant integration.""" import asyncio +import base64 from unittest.mock import ANY, patch from syrupy.assertion import SnapshotAssertion from homeassistant.components.assist_pipeline.const import DOMAIN -from homeassistant.components.assist_pipeline.pipeline import Pipeline, PipelineData +from homeassistant.components.assist_pipeline.pipeline import ( + DeviceAudioQueue, + Pipeline, + PipelineData, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr from .conftest import MockWakeWordEntity, MockWakeWordEntity2 +from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -2104,3 +2111,395 @@ async def test_wake_word_cooldown_different_entities( # Wake words should be the same assert ww_id_1 == ww_id_2 + + +async def test_device_capture( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + device_registry: 
dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test audio capture from a satellite device.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + satellite_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "satellite-1234")}, + ) + + audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + + # Start capture + client_capture = await hass_ws_client(hass) + await client_capture.send_json_auto_id( + { + "type": "assist_pipeline/device/capture", + "timeout": 30, + "device_id": satellite_device.id, + } + ) + + # result + msg = await client_capture.receive_json() + assert msg["success"] + + # Run pipeline + client_pipeline = await hass_ws_client(hass) + await client_pipeline.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "stt", + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, + "device_id": satellite_device.id, + } + ) + + # result + msg = await client_pipeline.receive_json() + assert msg["success"] + + # run start + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + + # stt + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + + for audio_chunk in audio_chunks: + await client_pipeline.send_bytes(bytes([handler_id]) + audio_chunk) + + # End of audio stream + await client_pipeline.send_bytes(bytes([handler_id])) + + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-end" + + # run end + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + + # Verify capture + events = [] + async with asyncio.timeout(1): + while True: + msg 
= await client_capture.receive_json() + assert msg["type"] == "event" + event_data = msg["event"] + events.append(event_data) + if event_data["type"] == "end": + break + + assert len(events) == len(audio_chunks) + 1 + + # Verify audio chunks + for i, audio_chunk in enumerate(audio_chunks): + assert events[i]["type"] == "audio" + assert events[i]["rate"] == 16000 + assert events[i]["width"] == 2 + assert events[i]["channels"] == 1 + + # Audio is base64 encoded + assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") + + # Last event is the end + assert events[-1]["type"] == "end" + + +async def test_device_capture_override( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test overriding an existing audio capture from a satellite device.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + satellite_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "satellite-1234")}, + ) + + audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + + # Start first capture + client_capture_1 = await hass_ws_client(hass) + await client_capture_1.send_json_auto_id( + { + "type": "assist_pipeline/device/capture", + "timeout": 30, + "device_id": satellite_device.id, + } + ) + + # result + msg = await client_capture_1.receive_json() + assert msg["success"] + + # Run pipeline + client_pipeline = await hass_ws_client(hass) + await client_pipeline.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "stt", + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, + "device_id": satellite_device.id, + } + ) + + # result + msg = await client_pipeline.receive_json() + assert msg["success"] + + # run start + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-start" + 
msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + + # stt + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + + # Send first audio chunk + await client_pipeline.send_bytes(bytes([handler_id]) + audio_chunks[0]) + + # Verify first capture + msg = await client_capture_1.receive_json() + assert msg["type"] == "event" + assert msg["event"] == snapshot + assert msg["event"]["audio"] == base64.b64encode(audio_chunks[0]).decode("ascii") + + # Start a new capture + client_capture_2 = await hass_ws_client(hass) + await client_capture_2.send_json_auto_id( + { + "type": "assist_pipeline/device/capture", + "timeout": 30, + "device_id": satellite_device.id, + } + ) + + # result (capture 2) + msg = await client_capture_2.receive_json() + assert msg["success"] + + # Send remaining audio chunks + for audio_chunk in audio_chunks[1:]: + await client_pipeline.send_bytes(bytes([handler_id]) + audio_chunk) + + # End of audio stream + await client_pipeline.send_bytes(bytes([handler_id])) + + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + + # run end + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + + # Verify that first capture ended with no more audio + msg = await client_capture_1.receive_json() + assert msg["type"] == "event" + assert msg["event"] == snapshot + assert msg["event"]["type"] == "end" + + # Verify that the second capture got the remaining audio + events = [] + async with asyncio.timeout(1): + while True: + msg = await client_capture_2.receive_json() + assert msg["type"] == "event" + event_data = msg["event"] + events.append(event_data) + if event_data["type"] == "end": + break + + # -1 since first audio chunk went to the 
first capture + assert len(events) == len(audio_chunks) + + # Verify all but first audio chunk + for i, audio_chunk in enumerate(audio_chunks[1:]): + assert events[i]["type"] == "audio" + assert events[i]["rate"] == 16000 + assert events[i]["width"] == 2 + assert events[i]["channels"] == 1 + + # Audio is base64 encoded + assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") + + # Last event is the end + assert events[-1]["type"] == "end" + + +async def test_device_capture_queue_full( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test audio capture from a satellite device when the recording queue fills up.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + satellite_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "satellite-1234")}, + ) + + class FakeQueue(asyncio.Queue): + """Queue that reports full for anything but None.""" + + def put_nowait(self, item): + if item is not None: + raise asyncio.QueueFull() + + super().put_nowait(item) + + with patch( + "homeassistant.components.assist_pipeline.websocket_api.DeviceAudioQueue" + ) as mock: + mock.return_value = DeviceAudioQueue(queue=FakeQueue()) + + # Start capture + client_capture = await hass_ws_client(hass) + await client_capture.send_json_auto_id( + { + "type": "assist_pipeline/device/capture", + "timeout": 30, + "device_id": satellite_device.id, + } + ) + + # result + msg = await client_capture.receive_json() + assert msg["success"] + + # Run pipeline + client_pipeline = await hass_ws_client(hass) + await client_pipeline.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "stt", + "input": { + "sample_rate": 16000, + "no_vad": True, + "no_chunking": True, + }, + "device_id": satellite_device.id, + } + ) + + # result + msg = await 
client_pipeline.receive_json() + assert msg["success"] + + # run start + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + + # stt + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + + # Single sample will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) + + # End of audio stream + await client_pipeline.send_bytes(bytes([handler_id])) + + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + + msg = await client_pipeline.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + + # Queue should have been overflowed + async with asyncio.timeout(1): + msg = await client_capture.receive_json() + assert msg["type"] == "event" + assert msg["event"] == snapshot + assert msg["event"]["type"] == "end" + assert msg["event"]["overflow"] + + +async def test_pipeline_empty_tts_output( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test events from a pipeline run with a empty text-to-speech text.""" + events = [] + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "tts", + "end_stage": "tts", + "input": { + "text": "", + }, + } + ) + + # result + msg = await client.receive_json() + assert msg["success"] + + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == 
"tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + assert not msg["event"]["data"]["tts_output"] + events.append(msg["event"]) + + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) diff --git a/tests/components/asuswrt/common.py b/tests/components/asuswrt/common.py index 8572584d65f..d3953416281 100644 --- a/tests/components/asuswrt/common.py +++ b/tests/components/asuswrt/common.py @@ -1,10 +1,13 @@ """Test code shared between test files.""" from aioasuswrt.asuswrt import Device as LegacyDevice +from pyasuswrt.asuswrt import Device as HttpDevice from homeassistant.components.asuswrt.const import ( CONF_SSH_KEY, MODE_ROUTER, + PROTOCOL_HTTP, + PROTOCOL_HTTPS, PROTOCOL_SSH, PROTOCOL_TELNET, ) @@ -40,6 +43,14 @@ CONFIG_DATA_SSH = { CONF_MODE: MODE_ROUTER, } +CONFIG_DATA_HTTP = { + CONF_HOST: HOST, + CONF_PORT: 80, + CONF_PROTOCOL: PROTOCOL_HTTPS, + CONF_USERNAME: "user", + CONF_PASSWORD: "pwd", +} + MOCK_MACS = [ "A1:B1:C1:D1:E1:F1", "A2:B2:C2:D2:E2:F2", @@ -48,6 +59,8 @@ MOCK_MACS = [ ] -def new_device(mac, ip, name): +def new_device(protocol, mac, ip, name): """Return a new device for specific protocol.""" + if protocol in [PROTOCOL_HTTP, PROTOCOL_HTTPS]: + return HttpDevice(mac, ip, name, ROUTER_MAC_ADDR, None) return LegacyDevice(mac, ip, name) diff --git a/tests/components/asuswrt/conftest.py b/tests/components/asuswrt/conftest.py index ab574cd667f..0f29c84c820 100644 --- a/tests/components/asuswrt/conftest.py +++ b/tests/components/asuswrt/conftest.py @@ -4,16 +4,24 @@ from unittest.mock import Mock, patch from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy from aioasuswrt.connection import TelnetConnection +from pyasuswrt.asuswrt import AsusWrtError, AsusWrtHttp import pytest +from 
homeassistant.components.asuswrt.const import PROTOCOL_HTTP, PROTOCOL_SSH + from .common import ASUSWRT_BASE, MOCK_MACS, ROUTER_MAC_ADDR, new_device +ASUSWRT_HTTP_LIB = f"{ASUSWRT_BASE}.bridge.AsusWrtHttp" ASUSWRT_LEGACY_LIB = f"{ASUSWRT_BASE}.bridge.AsusWrtLegacy" MOCK_BYTES_TOTAL = [60000000000, 50000000000] +MOCK_BYTES_TOTAL_HTTP = dict(enumerate(MOCK_BYTES_TOTAL)) MOCK_CURRENT_TRANSFER_RATES = [20000000, 10000000] -MOCK_LOAD_AVG = [1.1, 1.2, 1.3] -MOCK_TEMPERATURES = {"2.4GHz": 40.2, "5.0GHz": 0, "CPU": 71.2} +MOCK_CURRENT_TRANSFER_RATES_HTTP = dict(enumerate(MOCK_CURRENT_TRANSFER_RATES)) +MOCK_LOAD_AVG_HTTP = {"load_avg_1": 1.1, "load_avg_5": 1.2, "load_avg_15": 1.3} +MOCK_LOAD_AVG = list(MOCK_LOAD_AVG_HTTP.values()) +MOCK_TEMPERATURES_HTTP = {"2.4GHz": 40.2, "CPU": 71.2} +MOCK_TEMPERATURES = {**MOCK_TEMPERATURES_HTTP, "5.0GHz": 0} @pytest.fixture(name="patch_setup_entry") @@ -29,8 +37,17 @@ def mock_controller_patch_setup_entry(): def mock_devices_legacy_fixture(): """Mock a list of devices.""" return { - MOCK_MACS[0]: new_device(MOCK_MACS[0], "192.168.1.2", "Test"), - MOCK_MACS[1]: new_device(MOCK_MACS[1], "192.168.1.3", "TestTwo"), + MOCK_MACS[0]: new_device(PROTOCOL_SSH, MOCK_MACS[0], "192.168.1.2", "Test"), + MOCK_MACS[1]: new_device(PROTOCOL_SSH, MOCK_MACS[1], "192.168.1.3", "TestTwo"), + } + + +@pytest.fixture(name="mock_devices_http") +def mock_devices_http_fixture(): + """Mock a list of devices.""" + return { + MOCK_MACS[0]: new_device(PROTOCOL_HTTP, MOCK_MACS[0], "192.168.1.2", "Test"), + MOCK_MACS[1]: new_device(PROTOCOL_HTTP, MOCK_MACS[1], "192.168.1.3", "TestTwo"), } @@ -81,3 +98,48 @@ def mock_controller_connect_legacy_sens_fail(connect_legacy): True, True, ] + + +@pytest.fixture(name="connect_http") +def mock_controller_connect_http(mock_devices_http): + """Mock a successful connection with http library.""" + with patch(ASUSWRT_HTTP_LIB, spec_set=AsusWrtHttp) as service_mock: + service_mock.return_value.is_connected = True + 
service_mock.return_value.mac = ROUTER_MAC_ADDR + service_mock.return_value.model = "FAKE_MODEL" + service_mock.return_value.firmware = "FAKE_FIRMWARE" + service_mock.return_value.async_get_connected_devices.return_value = ( + mock_devices_http + ) + service_mock.return_value.async_get_traffic_bytes.return_value = ( + MOCK_BYTES_TOTAL_HTTP + ) + service_mock.return_value.async_get_traffic_rates.return_value = ( + MOCK_CURRENT_TRANSFER_RATES_HTTP + ) + service_mock.return_value.async_get_loadavg.return_value = MOCK_LOAD_AVG_HTTP + service_mock.return_value.async_get_temperatures.return_value = ( + MOCK_TEMPERATURES_HTTP + ) + yield service_mock + + +@pytest.fixture(name="connect_http_sens_fail") +def mock_controller_connect_http_sens_fail(connect_http): + """Mock a successful connection using http library with sensors fail.""" + connect_http.return_value.mac = None + connect_http.return_value.async_get_connected_devices.side_effect = AsusWrtError + connect_http.return_value.async_get_traffic_bytes.side_effect = AsusWrtError + connect_http.return_value.async_get_traffic_rates.side_effect = AsusWrtError + connect_http.return_value.async_get_loadavg.side_effect = AsusWrtError + connect_http.return_value.async_get_temperatures.side_effect = AsusWrtError + + +@pytest.fixture(name="connect_http_sens_detect") +def mock_controller_connect_http_sens_detect(): + """Mock a successful sensor detection using http library.""" + with patch( + f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", + return_value=[*MOCK_TEMPERATURES], + ) as mock_sens_detect: + yield mock_sens_detect diff --git a/tests/components/asuswrt/test_config_flow.py b/tests/components/asuswrt/test_config_flow.py index ec81c4a256a..0b5b0ace720 100644 --- a/tests/components/asuswrt/test_config_flow.py +++ b/tests/components/asuswrt/test_config_flow.py @@ -2,6 +2,7 @@ from socket import gaierror from unittest.mock import patch +from pyasuswrt import AsusWrtError import pytest from 
homeassistant import data_entry_flow @@ -13,18 +14,54 @@ from homeassistant.components.asuswrt.const import ( CONF_TRACK_UNKNOWN, DOMAIN, MODE_AP, + MODE_ROUTER, + PROTOCOL_HTTPS, + PROTOCOL_SSH, + PROTOCOL_TELNET, ) from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_MODE, CONF_PASSWORD +from homeassistant.const import ( + CONF_BASE, + CONF_HOST, + CONF_MODE, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant -from .common import ASUSWRT_BASE, CONFIG_DATA_TELNET, HOST, ROUTER_MAC_ADDR +from .common import ASUSWRT_BASE, HOST, ROUTER_MAC_ADDR from tests.common import MockConfigEntry SSH_KEY = "1234" +CONFIG_DATA = { + CONF_HOST: HOST, + CONF_USERNAME: "user", + CONF_PASSWORD: "pwd", +} + +CONFIG_DATA_HTTP = { + **CONFIG_DATA, + CONF_PROTOCOL: PROTOCOL_HTTPS, + CONF_PORT: 8443, +} + +CONFIG_DATA_SSH = { + **CONFIG_DATA, + CONF_PROTOCOL: PROTOCOL_SSH, + CONF_PORT: 22, +} + +CONFIG_DATA_TELNET = { + **CONFIG_DATA, + CONF_PROTOCOL: PROTOCOL_TELNET, + CONF_PORT: 23, +} + @pytest.fixture(name="patch_get_host", autouse=True) def mock_controller_patch_get_host(): @@ -45,7 +82,7 @@ def mock_controller_patch_is_file(): @pytest.mark.parametrize("unique_id", [{}, {"label_mac": ROUTER_MAC_ADDR}]) -async def test_user( +async def test_user_legacy( hass: HomeAssistant, connect_legacy, patch_setup_entry, unique_id ) -> None: """Test user config.""" @@ -58,30 +95,57 @@ async def test_user( connect_legacy.return_value.async_get_nvram.return_value = unique_id # test with all provided + legacy_result = await hass.config_entries.flow.async_configure( + flow_result["flow_id"], user_input=CONFIG_DATA_TELNET + ) + await hass.async_block_till_done() + + assert legacy_result["type"] == data_entry_flow.FlowResultType.FORM + assert legacy_result["step_id"] == "legacy" + + # complete configuration result = await 
hass.config_entries.flow.async_configure( - flow_result["flow_id"], - user_input=CONFIG_DATA_TELNET, + legacy_result["flow_id"], user_input={CONF_MODE: MODE_AP} ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY assert result["title"] == HOST - assert result["data"] == CONFIG_DATA_TELNET + assert result["data"] == {**CONFIG_DATA_TELNET, CONF_MODE: MODE_AP} assert len(patch_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( - ("config", "error"), - [ - ({}, "pwd_or_ssh"), - ({CONF_PASSWORD: "pwd", CONF_SSH_KEY: SSH_KEY}, "pwd_and_ssh"), - ], -) -async def test_error_wrong_password_ssh(hass: HomeAssistant, config, error) -> None: - """Test we abort for wrong password and ssh file combination.""" - config_data = {k: v for k, v in CONFIG_DATA_TELNET.items() if k != CONF_PASSWORD} - config_data.update(config) +@pytest.mark.parametrize("unique_id", [None, ROUTER_MAC_ADDR]) +async def test_user_http( + hass: HomeAssistant, connect_http, patch_setup_entry, unique_id +) -> None: + """Test user config http.""" + flow_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER, "show_advanced_options": True} + ) + assert flow_result["type"] == data_entry_flow.FlowResultType.FORM + assert flow_result["step_id"] == "user" + + connect_http.return_value.mac = unique_id + + # test with all provided + result = await hass.config_entries.flow.async_configure( + flow_result["flow_id"], user_input=CONFIG_DATA_HTTP + ) + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["title"] == HOST + assert result["data"] == CONFIG_DATA_HTTP + + assert len(patch_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize("config", [CONFIG_DATA_TELNET, CONFIG_DATA_HTTP]) +async def test_error_pwd_required(hass: HomeAssistant, config) -> None: + """Test we abort for missing password.""" + config_data = {k: v for k, v in config.items() if 
k != CONF_PASSWORD} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER, "show_advanced_options": True}, @@ -89,12 +153,25 @@ async def test_error_wrong_password_ssh(hass: HomeAssistant, config, error) -> N ) assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == {"base": error} + assert result["errors"] == {CONF_BASE: "pwd_required"} + + +async def test_error_no_password_ssh(hass: HomeAssistant) -> None: + """Test we abort for wrong password and ssh file combination.""" + config_data = {k: v for k, v in CONFIG_DATA_SSH.items() if k != CONF_PASSWORD} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": True}, + data=config_data, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == {CONF_BASE: "pwd_or_ssh"} async def test_error_invalid_ssh(hass: HomeAssistant, patch_is_file) -> None: """Test we abort if invalid ssh file is provided.""" - config_data = {k: v for k, v in CONFIG_DATA_TELNET.items() if k != CONF_PASSWORD} + config_data = {k: v for k, v in CONFIG_DATA_SSH.items() if k != CONF_PASSWORD} config_data[CONF_SSH_KEY] = SSH_KEY patch_is_file.return_value = False @@ -105,7 +182,7 @@ async def test_error_invalid_ssh(hass: HomeAssistant, patch_is_file) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == {"base": "ssh_not_file"} + assert result["errors"] == {CONF_BASE: "ssh_not_file"} async def test_error_invalid_host(hass: HomeAssistant, patch_get_host) -> None: @@ -118,7 +195,7 @@ async def test_error_invalid_host(hass: HomeAssistant, patch_get_host) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["errors"] == {"base": "invalid_host"} + assert result["errors"] == {CONF_BASE: "invalid_host"} async def test_abort_if_not_unique_id_setup(hass: HomeAssistant) -> None: @@ -138,27 +215,26 @@ async 
def test_abort_if_not_unique_id_setup(hass: HomeAssistant) -> None: async def test_update_uniqueid_exist( - hass: HomeAssistant, connect_legacy, patch_setup_entry + hass: HomeAssistant, connect_http, patch_setup_entry ) -> None: """Test we update entry if uniqueid is already configured.""" existing_entry = MockConfigEntry( domain=DOMAIN, - data={**CONFIG_DATA_TELNET, CONF_HOST: "10.10.10.10"}, + data={**CONFIG_DATA_HTTP, CONF_HOST: "10.10.10.10"}, unique_id=ROUTER_MAC_ADDR, ) existing_entry.add_to_hass(hass) - # test with all provided result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER, "show_advanced_options": True}, - data=CONFIG_DATA_TELNET, + data=CONFIG_DATA_HTTP, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY assert result["title"] == HOST - assert result["data"] == CONFIG_DATA_TELNET + assert result["data"] == CONFIG_DATA_HTTP prev_entry = hass.config_entries.async_get_entry(existing_entry.entry_id) assert not prev_entry @@ -190,10 +266,10 @@ async def test_abort_invalid_unique_id(hass: HomeAssistant, connect_legacy) -> N (None, "cannot_connect"), ], ) -async def test_on_connect_failed( +async def test_on_connect_legacy_failed( hass: HomeAssistant, connect_legacy, side_effect, error ) -> None: - """Test when we have errors connecting the router.""" + """Test when we have errors connecting the router with legacy library.""" flow_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER, "show_advanced_options": True}, @@ -202,11 +278,43 @@ async def test_on_connect_failed( connect_legacy.return_value.is_connected = False connect_legacy.return_value.connection.async_connect.side_effect = side_effect + # go to legacy form result = await hass.config_entries.flow.async_configure( flow_result["flow_id"], user_input=CONFIG_DATA_TELNET ) + await hass.async_block_till_done() + assert result["type"] == data_entry_flow.FlowResultType.FORM - 
assert result["errors"] == {"base": error} + assert result["errors"] == {CONF_BASE: error} + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (AsusWrtError, "cannot_connect"), + (TypeError, "unknown"), + (None, "cannot_connect"), + ], +) +async def test_on_connect_http_failed( + hass: HomeAssistant, connect_http, side_effect, error +) -> None: + """Test when we have errors connecting the router with http library.""" + flow_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": True}, + ) + + connect_http.return_value.is_connected = False + connect_http.return_value.async_connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + flow_result["flow_id"], user_input=CONFIG_DATA_HTTP + ) + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == {CONF_BASE: error} async def test_options_flow_ap(hass: HomeAssistant, patch_setup_entry) -> None: @@ -251,7 +359,7 @@ async def test_options_flow_router(hass: HomeAssistant, patch_setup_entry) -> No """Test config flow options for router mode.""" config_entry = MockConfigEntry( domain=DOMAIN, - data=CONFIG_DATA_TELNET, + data={**CONFIG_DATA_TELNET, CONF_MODE: MODE_ROUTER}, ) config_entry.add_to_hass(hass) @@ -280,3 +388,36 @@ async def test_options_flow_router(hass: HomeAssistant, patch_setup_entry) -> No CONF_INTERFACE: "aaa", CONF_DNSMASQ: "bbb", } + + +async def test_options_flow_http(hass: HomeAssistant, patch_setup_entry) -> None: + """Test config flow options for http mode.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={**CONFIG_DATA_HTTP, CONF_MODE: MODE_ROUTER}, + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] == 
data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "init" + assert CONF_INTERFACE not in result["data_schema"].schema + assert CONF_DNSMASQ not in result["data_schema"].schema + assert CONF_REQUIRE_IP not in result["data_schema"].schema + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CONSIDER_HOME: 20, + CONF_TRACK_UNKNOWN: True, + }, + ) + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert config_entry.options == { + CONF_CONSIDER_HOME: 20, + CONF_TRACK_UNKNOWN: True, + } diff --git a/tests/components/asuswrt/test_sensor.py b/tests/components/asuswrt/test_sensor.py index b2fa13101bc..a7b19bb3785 100644 --- a/tests/components/asuswrt/test_sensor.py +++ b/tests/components/asuswrt/test_sensor.py @@ -1,6 +1,7 @@ """Tests for the AsusWrt sensor.""" from datetime import timedelta +from pyasuswrt.asuswrt import AsusWrtError import pytest from homeassistant.components import device_tracker, sensor @@ -14,19 +15,32 @@ from homeassistant.components.asuswrt.const import ( ) from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE +from homeassistant.const import ( + CONF_PROTOCOL, + STATE_HOME, + STATE_NOT_HOME, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify from homeassistant.util.dt import utcnow -from .common import CONFIG_DATA_TELNET, HOST, MOCK_MACS, ROUTER_MAC_ADDR, new_device +from .common import ( + CONFIG_DATA_HTTP, + CONFIG_DATA_TELNET, + HOST, + MOCK_MACS, + ROUTER_MAC_ADDR, + new_device, +) from tests.common import MockConfigEntry, async_fire_time_changed SENSORS_DEFAULT = [*SENSORS_BYTES, *SENSORS_RATES] SENSORS_ALL_LEGACY = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, 
*SENSORS_TEMPERATURES] +SENSORS_ALL_HTTP = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES] @pytest.fixture(name="create_device_registry_devices") @@ -132,8 +146,12 @@ async def _test_sensors( assert hass.states.get(f"{sensor_prefix}_devices_connected").state == "1" # add 2 new devices, one unnamed that should be ignored but counted - mock_devices[MOCK_MACS[2]] = new_device(MOCK_MACS[2], "192.168.1.4", "TestThree") - mock_devices[MOCK_MACS[3]] = new_device(MOCK_MACS[3], "192.168.1.5", None) + mock_devices[MOCK_MACS[2]] = new_device( + config[CONF_PROTOCOL], MOCK_MACS[2], "192.168.1.4", "TestThree" + ) + mock_devices[MOCK_MACS[3]] = new_device( + config[CONF_PROTOCOL], MOCK_MACS[3], "192.168.1.5", None + ) # change consider home settings to have status not home of removed tracked device hass.config_entries.async_update_entry( @@ -154,7 +172,7 @@ async def _test_sensors( "entry_unique_id", [None, ROUTER_MAC_ADDR], ) -async def test_sensors( +async def test_sensors_legacy( hass: HomeAssistant, connect_legacy, mock_devices_legacy, @@ -165,11 +183,24 @@ async def test_sensors( await _test_sensors(hass, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id) -async def test_loadavg_sensors(hass: HomeAssistant, connect_legacy) -> None: +@pytest.mark.parametrize( + "entry_unique_id", + [None, ROUTER_MAC_ADDR], +) +async def test_sensors_http( + hass: HomeAssistant, + connect_http, + mock_devices_http, + create_device_registry_devices, + entry_unique_id, +) -> None: + """Test creating AsusWRT default sensors and tracker with http protocol.""" + await _test_sensors(hass, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id) + + +async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: """Test creating an AsusWRT load average sensors.""" - config_entry, sensor_prefix = _setup_entry( - hass, CONFIG_DATA_TELNET, SENSORS_LOAD_AVG - ) + config_entry, sensor_prefix = _setup_entry(hass, config, SENSORS_LOAD_AVG) config_entry.add_to_hass(hass) # initial 
devices setup @@ -184,13 +215,40 @@ async def test_loadavg_sensors(hass: HomeAssistant, connect_legacy) -> None: assert hass.states.get(f"{sensor_prefix}_sensor_load_avg15").state == "1.3" -async def test_temperature_sensors(hass: HomeAssistant, connect_legacy) -> None: - """Test creating a AsusWRT temperature sensors.""" +async def test_loadavg_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: + """Test creating an AsusWRT load average sensors.""" + await _test_loadavg_sensors(hass, CONFIG_DATA_TELNET) + + +async def test_loadavg_sensors_http(hass: HomeAssistant, connect_http) -> None: + """Test creating an AsusWRT load average sensors.""" + await _test_loadavg_sensors(hass, CONFIG_DATA_HTTP) + + +async def test_temperature_sensors_http_fail( + hass: HomeAssistant, connect_http_sens_fail +) -> None: + """Test fail creating AsusWRT temperature sensors.""" config_entry, sensor_prefix = _setup_entry( - hass, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES + hass, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES ) config_entry.add_to_hass(hass) + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # assert temperature availability exception is handled correctly + assert not hass.states.get(f"{sensor_prefix}_2_4ghz") + assert not hass.states.get(f"{sensor_prefix}_5_0ghz") + assert not hass.states.get(f"{sensor_prefix}_cpu") + + +async def _test_temperature_sensors(hass: HomeAssistant, config) -> None: + """Test creating a AsusWRT temperature sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, config, SENSORS_TEMPERATURES) + config_entry.add_to_hass(hass) + # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -203,11 +261,23 @@ async def test_temperature_sensors(hass: HomeAssistant, connect_legacy) -> None: assert hass.states.get(f"{sensor_prefix}_cpu").state == "71.2" +async def 
test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: + """Test creating a AsusWRT temperature sensors.""" + await _test_temperature_sensors(hass, CONFIG_DATA_TELNET) + + +async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> None: + """Test creating a AsusWRT temperature sensors.""" + await _test_temperature_sensors(hass, CONFIG_DATA_HTTP) + + @pytest.mark.parametrize( "side_effect", [OSError, None], ) -async def test_connect_fail(hass: HomeAssistant, connect_legacy, side_effect) -> None: +async def test_connect_fail_legacy( + hass: HomeAssistant, connect_legacy, side_effect +) -> None: """Test AsusWRT connect fail.""" # init config entry @@ -226,22 +296,43 @@ async def test_connect_fail(hass: HomeAssistant, connect_legacy, side_effect) -> assert config_entry.state is ConfigEntryState.SETUP_RETRY -async def test_sensors_polling_fails( - hass: HomeAssistant, connect_legacy_sens_fail +@pytest.mark.parametrize( + "side_effect", + [AsusWrtError, None], +) +async def test_connect_fail_http( + hass: HomeAssistant, connect_http, side_effect ) -> None: - """Test AsusWRT sensors are unavailable when polling fails.""" - config_entry, sensor_prefix = _setup_entry( - hass, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY + """Test AsusWRT connect fail.""" + + # init config entry + config_entry = MockConfigEntry( + domain=DOMAIN, + data=CONFIG_DATA_HTTP, ) config_entry.add_to_hass(hass) + connect_http.return_value.async_connect.side_effect = side_effect + connect_http.return_value.is_connected = False + + # initial setup fail + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> None: + """Test AsusWRT sensors are unavailable when polling fails.""" + config_entry, sensor_prefix = _setup_entry(hass, config, sensors) + config_entry.add_to_hass(hass) + # 
initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() - for sensor_name in SENSORS_ALL_LEGACY: + for sensor_name in sensors: assert ( hass.states.get(f"{sensor_prefix}_{slugify(sensor_name)}").state == STATE_UNAVAILABLE @@ -249,6 +340,23 @@ async def test_sensors_polling_fails( assert hass.states.get(f"{sensor_prefix}_devices_connected").state == "0" +async def test_sensors_polling_fails_legacy( + hass: HomeAssistant, + connect_legacy_sens_fail, +) -> None: + """Test AsusWRT sensors are unavailable when polling fails.""" + await _test_sensors_polling_fails(hass, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY) + + +async def test_sensors_polling_fails_http( + hass: HomeAssistant, + connect_http_sens_fail, + connect_http_sens_detect, +) -> None: + """Test AsusWRT sensors are unavailable when polling fails.""" + await _test_sensors_polling_fails(hass, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP) + + async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None: """Test AsusWRT integration is reload changing an options that require this.""" config_entry = MockConfigEntry( diff --git a/tests/components/aurora_abb_powerone/test_config_flow.py b/tests/components/aurora_abb_powerone/test_config_flow.py index b30da3ce348..d156dce2154 100644 --- a/tests/components/aurora_abb_powerone/test_config_flow.py +++ b/tests/components/aurora_abb_powerone/test_config_flow.py @@ -1,5 +1,4 @@ """Test the Aurora ABB PowerOne Solar PV config flow.""" -from logging import INFO from unittest.mock import patch from aurorapy.client import AuroraError, AuroraTimeoutError @@ -49,9 +48,6 @@ async def test_form(hass: HomeAssistant) -> None: ), patch( "aurorapy.client.AuroraSerialClient.firmware", return_value="1.234", - ), patch( - "homeassistant.components.aurora_abb_powerone.config_flow._LOGGER.getEffectiveLevel", - 
return_value=INFO, ) as mock_setup, patch( "homeassistant.components.aurora_abb_powerone.async_setup_entry", return_value=True, diff --git a/tests/components/aurora_abb_powerone/test_init.py b/tests/components/aurora_abb_powerone/test_init.py index f88cab0cb46..92b448d8645 100644 --- a/tests/components/aurora_abb_powerone/test_init.py +++ b/tests/components/aurora_abb_powerone/test_init.py @@ -18,9 +18,6 @@ async def test_unload_entry(hass: HomeAssistant) -> None: """Test unloading the aurora_abb_powerone entry.""" with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( - "homeassistant.components.aurora_abb_powerone.sensor.AuroraSensor.update", - return_value=None, - ), patch( "aurorapy.client.AuroraSerialClient.serial_number", return_value="9876543", ), patch( diff --git a/tests/components/aurora_abb_powerone/test_sensor.py b/tests/components/aurora_abb_powerone/test_sensor.py index 8fbe29f9979..61521c49b79 100644 --- a/tests/components/aurora_abb_powerone/test_sensor.py +++ b/tests/components/aurora_abb_powerone/test_sensor.py @@ -1,8 +1,8 @@ """Test the Aurora ABB PowerOne Solar PV sensors.""" -from datetime import timedelta from unittest.mock import patch from aurorapy.client import AuroraError, AuroraTimeoutError +from freezegun.api import FrozenDateTimeFactory from homeassistant.components.aurora_abb_powerone.const import ( ATTR_DEVICE_NAME, @@ -11,10 +11,10 @@ from homeassistant.components.aurora_abb_powerone.const import ( ATTR_SERIAL_NUMBER, DEFAULT_INTEGRATION_TITLE, DOMAIN, + SCAN_INTERVAL, ) from homeassistant.const import CONF_ADDRESS, CONF_PORT from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed @@ -95,14 +95,16 @@ async def test_sensors(hass: HomeAssistant) -> None: assert energy.state == "12.35" -async def test_sensor_dark(hass: HomeAssistant) -> None: +async def test_sensor_dark(hass: HomeAssistant, freezer: 
FrozenDateTimeFactory) -> None: """Test that darkness (no comms) is handled correctly.""" mock_entry = _mock_config_entry() - utcnow = dt_util.utcnow() # sun is up with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( "aurorapy.client.AuroraSerialClient.measure", side_effect=_simulated_returns + ), patch( + "aurorapy.client.AuroraSerialClient.cumulated_energy", + side_effect=_simulated_returns, ), patch( "aurorapy.client.AuroraSerialClient.serial_number", return_value="9876543", @@ -128,16 +130,24 @@ async def test_sensor_dark(hass: HomeAssistant) -> None: with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( "aurorapy.client.AuroraSerialClient.measure", side_effect=AuroraTimeoutError("No response after 10 seconds"), + ), patch( + "aurorapy.client.AuroraSerialClient.cumulated_energy", + side_effect=AuroraTimeoutError("No response after 3 tries"), ): - async_fire_time_changed(hass, utcnow + timedelta(seconds=60)) + freezer.tick(SCAN_INTERVAL * 2) + async_fire_time_changed(hass) await hass.async_block_till_done() - power = hass.states.get("sensor.mydevicename_power_output") + power = hass.states.get("sensor.mydevicename_total_energy") assert power.state == "unknown" # sun rose again with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( "aurorapy.client.AuroraSerialClient.measure", side_effect=_simulated_returns + ), patch( + "aurorapy.client.AuroraSerialClient.cumulated_energy", + side_effect=_simulated_returns, ): - async_fire_time_changed(hass, utcnow + timedelta(seconds=60)) + freezer.tick(SCAN_INTERVAL * 4) + async_fire_time_changed(hass) await hass.async_block_till_done() power = hass.states.get("sensor.mydevicename_power_output") assert power is not None @@ -146,8 +156,12 @@ async def test_sensor_dark(hass: HomeAssistant) -> None: with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( "aurorapy.client.AuroraSerialClient.measure", 
side_effect=AuroraTimeoutError("No response after 10 seconds"), + ), patch( + "aurorapy.client.AuroraSerialClient.cumulated_energy", + side_effect=AuroraError("No response after 10 seconds"), ): - async_fire_time_changed(hass, utcnow + timedelta(seconds=60)) + freezer.tick(SCAN_INTERVAL * 6) + async_fire_time_changed(hass) await hass.async_block_till_done() power = hass.states.get("sensor.mydevicename_power_output") assert power.state == "unknown" # should this be 'available'? @@ -160,7 +174,7 @@ async def test_sensor_unknown_error(hass: HomeAssistant) -> None: with patch("aurorapy.client.AuroraSerialClient.connect", return_value=None), patch( "aurorapy.client.AuroraSerialClient.measure", side_effect=AuroraError("another error"), - ): + ), patch("serial.Serial.isOpen", return_value=True): mock_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/auth/__init__.py b/tests/components/auth/__init__.py index 7ce65964086..8b731934913 100644 --- a/tests/components/auth/__init__.py +++ b/tests/components/auth/__init__.py @@ -1,8 +1,13 @@ """Tests for the auth component.""" +from typing import Any + from homeassistant import auth +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import ensure_auth_manager_loaded +from tests.test_util import mock_real_ip +from tests.typing import ClientSessionGenerator BASE_CONFIG = [ { @@ -18,11 +23,12 @@ EMPTY_CONFIG = [] async def async_setup_auth( - hass, - aiohttp_client, - provider_configs=BASE_CONFIG, + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + provider_configs: list[dict[str, Any]] = BASE_CONFIG, module_configs=EMPTY_CONFIG, - setup_api=False, + setup_api: bool = False, + custom_ip: str | None = None, ): """Set up authentication and create an HTTP client.""" hass.auth = await auth.auth_manager_from_config( @@ -32,4 +38,6 @@ async def async_setup_auth( 
await async_setup_component(hass, "auth", {}) if setup_api: await async_setup_component(hass, "api", {}) + if custom_ip: + mock_real_ip(hass.http.app)(custom_ip) return await aiohttp_client(hass.http.app) diff --git a/tests/components/auth/test_login_flow.py b/tests/components/auth/test_login_flow.py index b44d8fb4a11..639bbb9a9cb 100644 --- a/tests/components/auth/test_login_flow.py +++ b/tests/components/auth/test_login_flow.py @@ -1,25 +1,141 @@ """Tests for the login flow.""" +from collections.abc import Callable from http import HTTPStatus +from typing import Any from unittest.mock import patch -from homeassistant.core import HomeAssistant +import pytest -from . import async_setup_auth +from homeassistant.auth.models import User +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . import BASE_CONFIG, async_setup_auth from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI from tests.typing import ClientSessionGenerator +_TRUSTED_NETWORKS_CONFIG = { + "type": "trusted_networks", + "trusted_networks": ["192.168.0.1"], + "trusted_users": { + "192.168.0.1": [ + "a1ab982744b64757bf80515589258924", + {"group": "system-group"}, + ] + }, +} + +@pytest.mark.parametrize( + ("provider_configs", "ip", "expected"), + [ + ( + BASE_CONFIG, + None, + [{"name": "Example", "type": "insecure_example", "id": None}], + ), + ( + [_TRUSTED_NETWORKS_CONFIG], + None, + [], + ), + ( + [_TRUSTED_NETWORKS_CONFIG], + "192.168.0.1", + [{"name": "Trusted Networks", "type": "trusted_networks", "id": None}], + ), + ], +) async def test_fetch_auth_providers( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + provider_configs: list[dict[str, Any]], + ip: str | None, + expected: list[dict[str, Any]], ) -> None: """Test fetching auth providers.""" - client = await async_setup_auth(hass, aiohttp_client) + client = await async_setup_auth( + hass, aiohttp_client, 
provider_configs, custom_ip=ip + ) resp = await client.get("/auth/providers") assert resp.status == HTTPStatus.OK - assert await resp.json() == [ - {"name": "Example", "type": "insecure_example", "id": None} - ] + assert await resp.json() == expected + + +async def _test_fetch_auth_providers_home_assistant( + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + ip: str, + additional_expected_fn: Callable[[User], dict[str, Any]], +) -> None: + """Test fetching auth providers for homeassistant auth provider.""" + client = await async_setup_auth( + hass, aiohttp_client, [{"type": "homeassistant"}], custom_ip=ip + ) + + provider = hass.auth.auth_providers[0] + credentials = await provider.async_get_or_create_credentials({"username": "hello"}) + user = await hass.auth.async_get_or_create_user(credentials) + + expected = { + "name": "Home Assistant Local", + "type": "homeassistant", + "id": None, + **additional_expected_fn(user), + } + + resp = await client.get("/auth/providers") + assert resp.status == HTTPStatus.OK + assert await resp.json() == [expected] + + +@pytest.mark.parametrize( + "ip", + [ + "192.168.0.10", + "::ffff:192.168.0.10", + "1.2.3.4", + "2001:db8::1", + ], +) +async def test_fetch_auth_providers_home_assistant_person_not_loaded( + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + ip: str, +) -> None: + """Test fetching auth providers for homeassistant auth provider, where person integration is not loaded.""" + await _test_fetch_auth_providers_home_assistant( + hass, aiohttp_client, ip, lambda _: {} + ) + + +@pytest.mark.parametrize( + ("ip", "is_local"), + [ + ("192.168.0.10", True), + ("::ffff:192.168.0.10", True), + ("1.2.3.4", False), + ("2001:db8::1", False), + ], +) +async def test_fetch_auth_providers_home_assistant_person_loaded( + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + ip: str, + is_local: bool, +) -> None: + """Test fetching auth providers for homeassistant auth provider, where person 
integration is loaded.""" + domain = "person" + config = {domain: {"id": "1234", "name": "test person"}} + assert await async_setup_component(hass, domain, config) + + await _test_fetch_auth_providers_home_assistant( + hass, + aiohttp_client, + ip, + lambda user: {"users": {user.id: user.name}} if is_local else {}, + ) async def test_fetch_auth_providers_onboarding( diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 9d3b9889cd3..e23f86e545b 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -92,7 +92,8 @@ async def test_load_backups(hass: HomeAssistant) -> None: "date": TEST_BACKUP.date, }, ), patch( - "pathlib.Path.stat", return_value=MagicMock(st_size=TEST_BACKUP.size) + "pathlib.Path.stat", + return_value=MagicMock(st_size=TEST_BACKUP.size), ): await manager.load_backups() backups = await manager.get_backups() diff --git a/tests/components/balboa/test_binary_sensor.py b/tests/components/balboa/test_binary_sensor.py index e97887b154a..ee5f2bc353c 100644 --- a/tests/components/balboa/test_binary_sensor.py +++ b/tests/components/balboa/test_binary_sensor.py @@ -16,7 +16,7 @@ async def test_filters( ) -> None: """Test spa filters.""" for num in (1, 2): - sensor = f"{ENTITY_BINARY_SENSOR}filter{num}" + sensor = f"{ENTITY_BINARY_SENSOR}filter_cycle_{num}" state = hass.states.get(sensor) assert state.state == STATE_OFF @@ -33,7 +33,7 @@ async def test_circ_pump( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test spa circ pump.""" - sensor = f"{ENTITY_BINARY_SENSOR}circ_pump" + sensor = f"{ENTITY_BINARY_SENSOR}circulation_pump" state = hass.states.get(sensor) assert state.state == STATE_OFF diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index 4967bcdfa38..90ef6c75e5f 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -38,7 +38,7 @@ 
HVAC_SETTINGS = [ HVACMode.AUTO, ] -ENTITY_CLIMATE = "climate.fakespa_climate" +ENTITY_CLIMATE = "climate.fakespa" async def test_spa_defaults( diff --git a/tests/components/blink/conftest.py b/tests/components/blink/conftest.py new file mode 100644 index 00000000000..946840c23b9 --- /dev/null +++ b/tests/components/blink/conftest.py @@ -0,0 +1,97 @@ +"""Fixtures for the Blink integration tests.""" +from unittest.mock import AsyncMock, MagicMock, create_autospec, patch +from uuid import uuid4 + +import blinkpy +import pytest + +from homeassistant.components.blink.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + +CAMERA_ATTRIBUTES = { + "name": "Camera 1", + "camera_id": "111111", + "serial": "serial", + "temperature": None, + "temperature_c": 25.1, + "temperature_calibrated": None, + "battery": "ok", + "battery_voltage": None, + "thumbnail": "https://rest-u034.immedia-semi.com/api/v3/media/accounts/111111/networks/222222/lotus/333333/thumbnail/thumbnail.jpg?ts=1698141602&ext=", + "video": None, + "recent_clips": [], + "motion_enabled": True, + "motion_detected": False, + "wifi_strength": None, + "network_id": 222222, + "sync_module": "sync module", + "last_record": None, + "type": "lotus", +} + + +@pytest.fixture +def camera() -> MagicMock: + """Set up a Blink camera fixture.""" + mock_blink_camera = create_autospec(blinkpy.camera.BlinkCamera, instance=True) + mock_blink_camera.sync = AsyncMock(return_value=True) + mock_blink_camera.name = "Camera 1" + mock_blink_camera.camera_id = "111111" + mock_blink_camera.serial = "12345" + mock_blink_camera.motion_enabled = True + mock_blink_camera.temperature = 25.1 + mock_blink_camera.motion_detected = False + mock_blink_camera.wifi_strength = 2.1 + mock_blink_camera.camera_type = "lotus" + mock_blink_camera.attributes = CAMERA_ATTRIBUTES + return mock_blink_camera + + +@pytest.fixture(name="mock_blink_api") +def blink_api_fixture(camera) -> 
MagicMock: + """Set up Blink API fixture.""" + mock_blink_api = create_autospec(blinkpy.blinkpy.Blink, instance=True) + mock_blink_api.available = True + mock_blink_api.start = AsyncMock(return_value=True) + mock_blink_api.refresh = AsyncMock(return_value=True) + mock_blink_api.sync = MagicMock(return_value=True) + mock_blink_api.cameras = {camera.name: camera} + + with patch("homeassistant.components.blink.Blink") as class_mock: + class_mock.return_value = mock_blink_api + yield mock_blink_api + + +@pytest.fixture(name="mock_blink_auth_api") +def blink_auth_api_fixture() -> MagicMock: + """Set up Blink API fixture.""" + mock_blink_auth_api = create_autospec(blinkpy.auth.Auth, instance=True) + mock_blink_auth_api.check_key_required.return_value = False + mock_blink_auth_api.send_auth_key = AsyncMock(return_value=True) + + with patch("homeassistant.components.blink.Auth", autospec=True) as class_mock: + class_mock.return_value = mock_blink_auth_api + yield mock_blink_auth_api + + +@pytest.fixture(name="mock_config_entry") +def mock_config_fixture(): + """Return a fake config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test_user", + CONF_PASSWORD: "Password", + "device_id": "Home Assistant", + "uid": "BlinkCamera_e1233333e2-0909-09cd-777a-123456789012", + "token": "A_token", + "host": "u034.immedia-semi.com", + "region_id": "u034", + "client_id": 123456, + "account_id": 654321, + }, + entry_id=str(uuid4()), + version=3, + ) diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..7fb13c97548 --- /dev/null +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -0,0 +1,52 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'cameras': dict({ + 'Camera 1': dict({ + 'battery': 'ok', + 'battery_voltage': None, + 'camera_id': '111111', + 'last_record': None, + 'motion_detected': False, + 'motion_enabled': 
True, + 'name': 'Camera 1', + 'network_id': 222222, + 'recent_clips': list([ + ]), + 'serial': '**REDACTED**', + 'sync_module': 'sync module', + 'temperature': None, + 'temperature_c': 25.1, + 'temperature_calibrated': None, + 'thumbnail': 'https://rest-u034.immedia-semi.com/api/v3/media/accounts/111111/networks/222222/lotus/333333/thumbnail/thumbnail.jpg?ts=1698141602&ext=', + 'type': 'lotus', + 'video': None, + 'wifi_strength': None, + }), + }), + 'config_entry': dict({ + 'data': dict({ + 'account_id': 654321, + 'client_id': 123456, + 'device_id': 'Home Assistant', + 'host': 'u034.immedia-semi.com', + 'password': '**REDACTED**', + 'region_id': 'u034', + 'token': '**REDACTED**', + 'uid': 'BlinkCamera_e1233333e2-0909-09cd-777a-123456789012', + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'blink', + 'options': dict({ + 'scan_interval': 300, + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 3, + }), + }) +# --- diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index ab04499c827..ada38451754 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -120,7 +120,8 @@ async def test_form_2fa_connect_error(hass: HomeAssistant) -> None: "homeassistant.components.blink.config_flow.Blink.setup_urls", side_effect=BlinkSetupError, ), patch( - "homeassistant.components.blink.async_setup_entry", return_value=True + "homeassistant.components.blink.async_setup_entry", + return_value=True, ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"pin": "1234"} @@ -161,7 +162,8 @@ async def test_form_2fa_invalid_key(hass: HomeAssistant) -> None: "homeassistant.components.blink.config_flow.Blink.setup_urls", return_value=True, ), patch( - "homeassistant.components.blink.async_setup_entry", return_value=True + 
"homeassistant.components.blink.async_setup_entry", + return_value=True, ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"pin": "1234"} @@ -200,7 +202,8 @@ async def test_form_2fa_unknown_error(hass: HomeAssistant) -> None: "homeassistant.components.blink.config_flow.Blink.setup_urls", side_effect=KeyError, ), patch( - "homeassistant.components.blink.async_setup_entry", return_value=True + "homeassistant.components.blink.async_setup_entry", + return_value=True, ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"pin": "1234"} diff --git a/tests/components/blink/test_diagnostics.py b/tests/components/blink/test_diagnostics.py new file mode 100644 index 00000000000..d447203dae6 --- /dev/null +++ b/tests/components/blink/test_diagnostics.py @@ -0,0 +1,33 @@ +"""Test Blink diagnostics.""" +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + +YAML_CONFIG = {"username": "test-user", "password": "test-password"} + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + mock_blink_api: MagicMock, + mock_config_entry: MagicMock, +) -> None: + """Test config entry diagnostics.""" + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("entry_id")) diff --git a/tests/components/blink/test_init.py b/tests/components/blink/test_init.py new file mode 100644 index 00000000000..f3d9beaf21a --- /dev/null +++ b/tests/components/blink/test_init.py @@ -0,0 +1,116 @@ +"""Test the Blink 
init.""" +import asyncio +from unittest.mock import AsyncMock, MagicMock + +from aiohttp import ClientError +import pytest + +from homeassistant.components.blink.const import ( + DOMAIN, + SERVICE_REFRESH, + SERVICE_SAVE_VIDEO, + SERVICE_SEND_PIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +CAMERA_NAME = "Camera 1" +FILENAME = "blah" +PIN = "1234" + + +@pytest.mark.parametrize( + ("the_error", "available"), + [(ClientError, False), (asyncio.TimeoutError, False), (None, False)], +) +async def test_setup_not_ready( + hass: HomeAssistant, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + the_error, + available, +) -> None: + """Test setup failed because we can't connect to the Blink system.""" + + mock_blink_api.start = AsyncMock(side_effect=the_error) + mock_blink_api.available = available + + mock_config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_not_ready_authkey_required( + hass: HomeAssistant, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setup failed because 2FA is needed to connect to the Blink system.""" + + mock_blink_auth_api.check_key_required = MagicMock(return_value=True) + + mock_config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_unload_entry_multiple( + hass: HomeAssistant, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test being able to unload one of 2 entries.""" + + mock_config_entry.add_to_hass(hass) + assert await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + hass.data[DOMAIN]["dummy"] = {1: 2} + assert mock_config_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + assert hass.services.has_service(DOMAIN, SERVICE_REFRESH) + assert hass.services.has_service(DOMAIN, SERVICE_SAVE_VIDEO) + assert hass.services.has_service(DOMAIN, SERVICE_SEND_PIN) + + +async def test_migrate_V0( + hass: HomeAssistant, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration script version 0.""" + + mock_config_entry.version = 0 + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize(("version"), [1, 2]) +async def test_migrate( + hass: HomeAssistant, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + version, +) -> None: + """Test migration scripts.""" + + mock_config_entry.version = version + mock_config_entry.data = {**mock_config_entry.data, "login_response": "Blah"} + + mock_config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/blink/test_services.py b/tests/components/blink/test_services.py new file mode 100644 index 00000000000..ccc326dac1f --- /dev/null +++ b/tests/components/blink/test_services.py @@ -0,0 +1,377 @@ +"""Test the Blink services.""" +from unittest.mock import AsyncMock, 
MagicMock, Mock + +import pytest + +from homeassistant.components.blink.const import ( + DOMAIN, + SERVICE_REFRESH, + SERVICE_SAVE_RECENT_CLIPS, + SERVICE_SAVE_VIDEO, + SERVICE_SEND_PIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_DEVICE_ID, + CONF_FILE_PATH, + CONF_FILENAME, + CONF_NAME, + CONF_PIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr + +from tests.common import MockConfigEntry + +CAMERA_NAME = "Camera 1" +FILENAME = "blah" +PIN = "1234" + + +async def test_refresh_service_calls( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test refresh service calls.""" + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "12345")}) + + assert device_entry + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_blink_api.refresh.call_count == 1 + + await hass.services.async_call( + DOMAIN, + SERVICE_REFRESH, + {ATTR_DEVICE_ID: [device_entry.id]}, + blocking=True, + ) + + assert mock_blink_api.refresh.call_count == 2 + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_REFRESH, + {ATTR_DEVICE_ID: ["bad-device_id"]}, + blocking=True, + ) + + +async def test_video_service_calls( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test video service calls.""" + + mock_config_entry.add_to_hass(hass) + assert await
hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "12345")}) + + assert device_entry + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_blink_api.refresh.call_count == 1 + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_VIDEO, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + blocking=True, + ) + + hass.config.is_allowed_path = Mock(return_value=True) + caplog.clear() + mock_blink_api.cameras = {CAMERA_NAME: AsyncMock()} + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_VIDEO, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + blocking=True, + ) + mock_blink_api.cameras[CAMERA_NAME].video_to_file.assert_awaited_once() + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_VIDEO, + { + ATTR_DEVICE_ID: ["bad-device_id"], + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + blocking=True, + ) + + mock_blink_api.cameras[CAMERA_NAME].video_to_file = AsyncMock(side_effect=OSError) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_VIDEO, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + blocking=True, + ) + + hass.config.is_allowed_path = Mock(return_value=False) + + +async def test_picture_service_calls( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test picture service calls.""" + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + 
device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "12345")}) + + assert device_entry + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_blink_api.refresh.call_count == 1 + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_RECENT_CLIPS, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILE_PATH: FILENAME, + }, + blocking=True, + ) + + hass.config.is_allowed_path = Mock(return_value=True) + mock_blink_api.cameras = {CAMERA_NAME: AsyncMock()} + + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_RECENT_CLIPS, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILE_PATH: FILENAME, + }, + blocking=True, + ) + mock_blink_api.cameras[CAMERA_NAME].save_recent_clips.assert_awaited_once() + + mock_blink_api.cameras[CAMERA_NAME].save_recent_clips = AsyncMock( + side_effect=OSError + ) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_RECENT_CLIPS, + { + ATTR_DEVICE_ID: [device_entry.id], + CONF_NAME: CAMERA_NAME, + CONF_FILE_PATH: FILENAME, + }, + blocking=True, + ) + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SAVE_RECENT_CLIPS, + { + ATTR_DEVICE_ID: ["bad-device_id"], + CONF_NAME: CAMERA_NAME, + CONF_FILE_PATH: FILENAME, + }, + blocking=True, + ) + + +async def test_pin_service_calls( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pin service calls.""" + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "12345")}) + + assert device_entry + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert 
mock_blink_api.refresh.call_count == 1 + + await hass.services.async_call( + DOMAIN, + SERVICE_SEND_PIN, + {ATTR_DEVICE_ID: [device_entry.id], CONF_PIN: PIN}, + blocking=True, + ) + mock_blink_api.auth.send_auth_key.assert_awaited_once() + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SEND_PIN, + {ATTR_DEVICE_ID: ["bad-device_id"], CONF_PIN: PIN}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "params"), + [ + (SERVICE_SEND_PIN, {CONF_PIN: PIN}), + ( + SERVICE_SAVE_RECENT_CLIPS, + { + CONF_NAME: CAMERA_NAME, + CONF_FILE_PATH: FILENAME, + }, + ), + ( + SERVICE_SAVE_VIDEO, + { + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + ), + ], +) +async def test_service_called_with_non_blink_device( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + service, + params, +) -> None: + """Test service calls with non blink device.""" + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + other_domain = "NotBlink" + other_config_id = "555" + await hass.config_entries.async_add( + MockConfigEntry( + title="Not Blink", domain=other_domain, entry_id=other_config_id + ) + ) + device_entry = device_registry.async_get_or_create( + config_entry_id=other_config_id, + identifiers={ + (other_domain, 1), + }, + ) + + hass.config.is_allowed_path = Mock(return_value=True) + mock_blink_api.cameras = {CAMERA_NAME: AsyncMock()} + + parameters = {ATTR_DEVICE_ID: [device_entry.id]} + parameters.update(params) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + service, + parameters, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "params"), + [ + (SERVICE_SEND_PIN, {CONF_PIN: PIN}), + ( + SERVICE_SAVE_RECENT_CLIPS, + { + CONF_NAME: CAMERA_NAME, + 
CONF_FILE_PATH: FILENAME, + }, + ), + ( + SERVICE_SAVE_VIDEO, + { + CONF_NAME: CAMERA_NAME, + CONF_FILENAME: FILENAME, + }, + ), + ], +) +async def test_service_called_with_unloaded_entry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_blink_api: MagicMock, + mock_blink_auth_api: MagicMock, + mock_config_entry: MockConfigEntry, + service, + params, +) -> None: + """Test service calls with unloaded config entry.""" + + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + await mock_config_entry.async_unload(hass) + + device_entry = device_registry.async_get_device(identifiers={(DOMAIN, "12345")}) + + assert device_entry + + hass.config.is_allowed_path = Mock(return_value=True) + mock_blink_api.cameras = {CAMERA_NAME: AsyncMock()} + + parameters = {ATTR_DEVICE_ID: [device_entry.id]} + parameters.update(params) + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + service, + parameters, + blocking=True, + ) diff --git a/tests/components/blueprint/test_models.py b/tests/components/blueprint/test_models.py index b2d3ce517d8..c11a467de9b 100644 --- a/tests/components/blueprint/test_models.py +++ b/tests/components/blueprint/test_models.py @@ -1,6 +1,6 @@ """Test blueprint models.""" import logging -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -49,7 +49,7 @@ def blueprint_2(): def domain_bps(hass): """Domain blueprints fixture.""" return models.DomainBlueprints( - hass, "automation", logging.getLogger(__name__), None + hass, "automation", logging.getLogger(__name__), None, AsyncMock() ) @@ -257,13 +257,9 @@ async def test_domain_blueprints_inputs_from_config(domain_bps, blueprint_1) -> async def test_domain_blueprints_add_blueprint(domain_bps, blueprint_1) -> None: """Test DomainBlueprints.async_add_blueprint.""" with patch.object(domain_bps, "_create_file") as 
create_file_mock: - # Should add extension when not present. - await domain_bps.async_add_blueprint(blueprint_1, "something") + await domain_bps.async_add_blueprint(blueprint_1, "something.yaml") assert create_file_mock.call_args[0][1] == "something.yaml" - await domain_bps.async_add_blueprint(blueprint_1, "something2.yaml") - assert create_file_mock.call_args[0][1] == "something2.yaml" - # Should be in cache. with patch.object(domain_bps, "_load_blueprint") as mock_load: assert await domain_bps.async_get_blueprint("something.yaml") == blueprint_1 diff --git a/tests/components/blueprint/test_websocket_api.py b/tests/components/blueprint/test_websocket_api.py index f831445b60c..b0439896c25 100644 --- a/tests/components/blueprint/test_websocket_api.py +++ b/tests/components/blueprint/test_websocket_api.py @@ -3,6 +3,7 @@ from pathlib import Path from unittest.mock import Mock, patch import pytest +import yaml from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -129,6 +130,52 @@ async def test_import_blueprint( }, }, "validation_errors": None, + "exists": False, + } + + +async def test_import_blueprint_update( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, + setup_bp, +) -> None: + """Test importing blueprints.""" + raw_data = Path( + hass.config.path("blueprints/automation/in_folder/in_folder_blueprint.yaml") + ).read_text() + + aioclient_mock.get( + "https://raw.githubusercontent.com/in_folder/home-assistant-config/main/blueprints/automation/in_folder_blueprint.yaml", + text=raw_data, + ) + + client = await hass_ws_client(hass) + await client.send_json( + { + "id": 5, + "type": "blueprint/import", + "url": "https://github.com/in_folder/home-assistant-config/blob/main/blueprints/automation/in_folder_blueprint.yaml", + } + ) + + msg = await client.receive_json() + + assert msg["id"] == 5 + assert msg["success"] + assert msg["result"] == { + "suggested_filename": 
"in_folder/in_folder_blueprint", + "raw_data": raw_data, + "blueprint": { + "metadata": { + "domain": "automation", + "input": {"action": None, "trigger": None}, + "name": "In Folder Blueprint", + "source_url": "https://github.com/in_folder/home-assistant-config/blob/main/blueprints/automation/in_folder_blueprint.yaml", + } + }, + "validation_errors": None, + "exists": True, } @@ -212,6 +259,42 @@ async def test_save_existing_file( assert msg["error"] == {"code": "already_exists", "message": "File already exists"} +async def test_save_existing_file_override( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test saving blueprints.""" + + client = await hass_ws_client(hass) + with patch("pathlib.Path.write_text") as write_mock: + await client.send_json( + { + "id": 7, + "type": "blueprint/save", + "path": "test_event_service", + "yaml": 'blueprint: {name: "name", domain: "automation"}', + "domain": "automation", + "source_url": "https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/test_event_service.yaml", + "allow_override": True, + } + ) + + msg = await client.receive_json() + + assert msg["id"] == 7 + assert msg["success"] + assert msg["result"] == {"overrides_existing": True} + assert yaml.safe_load(write_mock.mock_calls[0][1][0]) == { + "blueprint": { + "name": "name", + "domain": "automation", + "source_url": "https://github.com/balloob/home-assistant-config/blob/main/blueprints/automation/test_event_service.yaml", + "input": {}, + } + } + + async def test_save_file_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -355,7 +438,7 @@ async def test_delete_blueprint_in_use_by_automation( assert msg["id"] == 9 assert not msg["success"] assert msg["error"] == { - "code": "unknown_error", + "code": "home_assistant_error", "message": "Blueprint in use", } @@ -401,6 +484,6 @@ async def test_delete_blueprint_in_use_by_script( assert msg["id"] == 9 assert not 
msg["success"] assert msg["error"] == { - "code": "unknown_error", + "code": "home_assistant_error", "message": "Blueprint in use", } diff --git a/tests/components/bluetooth/conftest.py b/tests/components/bluetooth/conftest.py index 59c5cc822df..5f166a3fca2 100644 --- a/tests/components/bluetooth/conftest.py +++ b/tests/components/bluetooth/conftest.py @@ -47,12 +47,14 @@ def mock_operating_system_90(): def macos_adapter(): """Fixture that mocks the macos adapter.""" with patch("bleak.get_platform_scanner_backend_type"), patch( - "homeassistant.components.bluetooth.platform.system", return_value="Darwin" + "homeassistant.components.bluetooth.platform.system", + return_value="Darwin", ), patch( "homeassistant.components.bluetooth.scanner.platform.system", return_value="Darwin", ), patch( - "bluetooth_adapters.systems.platform.system", return_value="Darwin" + "bluetooth_adapters.systems.platform.system", + return_value="Darwin", ): yield @@ -71,14 +73,16 @@ def windows_adapter(): def no_adapter_fixture(): """Fixture that mocks no adapters on Linux.""" with patch( - "homeassistant.components.bluetooth.platform.system", return_value="Linux" + "homeassistant.components.bluetooth.platform.system", + return_value="Linux", ), patch( "homeassistant.components.bluetooth.scanner.platform.system", return_value="Linux", ), patch( - "bluetooth_adapters.systems.platform.system", return_value="Linux" + "bluetooth_adapters.systems.platform.system", + return_value="Linux", ), patch( - "bluetooth_adapters.systems.linux.LinuxAdapters.refresh" + "bluetooth_adapters.systems.linux.LinuxAdapters.refresh", ), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.adapters", {}, @@ -90,14 +94,16 @@ def no_adapter_fixture(): def one_adapter_fixture(): """Fixture that mocks one adapter on Linux.""" with patch( - "homeassistant.components.bluetooth.platform.system", return_value="Linux" + "homeassistant.components.bluetooth.platform.system", + return_value="Linux", ), patch( 
"homeassistant.components.bluetooth.scanner.platform.system", return_value="Linux", ), patch( - "bluetooth_adapters.systems.platform.system", return_value="Linux" + "bluetooth_adapters.systems.platform.system", + return_value="Linux", ), patch( - "bluetooth_adapters.systems.linux.LinuxAdapters.refresh" + "bluetooth_adapters.systems.linux.LinuxAdapters.refresh", ), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.adapters", { @@ -124,9 +130,7 @@ def two_adapters_fixture(): ), patch( "homeassistant.components.bluetooth.scanner.platform.system", return_value="Linux", - ), patch( - "bluetooth_adapters.systems.platform.system", return_value="Linux" - ), patch( + ), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.refresh" ), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.adapters", @@ -166,9 +170,7 @@ def one_adapter_old_bluez(): ), patch( "homeassistant.components.bluetooth.scanner.platform.system", return_value="Linux", - ), patch( - "bluetooth_adapters.systems.platform.system", return_value="Linux" - ), patch( + ), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.refresh" ), patch( "bluetooth_adapters.systems.linux.LinuxAdapters.adapters", diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index de646f8ef9c..f69f8971479 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -7,6 +7,7 @@ from unittest.mock import patch import bleak from bleak.backends.device import BLEDevice from bleak.backends.scanner import AdvertisementData +from bleak.exc import BleakError import pytest from homeassistant.components.bluetooth import ( @@ -366,3 +367,25 @@ async def test_we_switch_adapters_on_failure( assert await client.connect() is False cancel_hci0() cancel_hci1() + + +async def 
test_raise_after_shutdown( + hass: HomeAssistant, + two_adapters: None, + enable_bluetooth: None, + install_bleak_catcher, + mock_platform_client_that_raises_on_connect, +) -> None: + """Ensure the slot gets released on connection exception.""" + manager = _get_manager() + hci0_device_advs, cancel_hci0, cancel_hci1 = _generate_scanners_with_fake_devices( + hass + ) + # hci0 has 2 slots, hci1 has 1 slot + with patch.object(manager, "shutdown", True): + ble_device = hci0_device_advs["00:00:00:00:00:01"][0] + client = bleak.BleakClient(ble_device) + with pytest.raises(BleakError, match="shutdown"): + await client.connect() + cancel_hci0() + cancel_hci1() diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 32405d93e6b..b3af5bc59b6 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -413,7 +413,6 @@ 'servicePack': 'WAVE_01', }), 'fetched_at': '2022-07-10T11:00:00+00:00', - 'is_metric': True, 'mappingInfo': dict({ 'isAssociated': False, 'isLmmEnabled': False, @@ -1288,7 +1287,6 @@ 'servicePack': 'WAVE_01', }), 'fetched_at': '2022-07-10T11:00:00+00:00', - 'is_metric': True, 'mappingInfo': dict({ 'isAssociated': False, 'isLmmEnabled': False, @@ -1979,7 +1977,6 @@ 'charging_settings': dict({ }), 'fetched_at': '2022-07-10T11:00:00+00:00', - 'is_metric': True, 'mappingInfo': dict({ 'isAssociated': False, 'isLmmEnabled': False, @@ -2734,7 +2731,6 @@ 'servicePack': 'TCB1', }), 'fetched_at': '2022-07-10T11:00:00+00:00', - 'is_metric': True, 'mappingInfo': dict({ 'isPrimaryUser': True, 'mappingStatus': 'CONFIRMED', @@ -5070,7 +5066,6 @@ 'servicePack': 'TCB1', }), 'fetched_at': '2022-07-10T11:00:00+00:00', - 'is_metric': True, 'mappingInfo': dict({ 'isPrimaryUser': True, 'mappingStatus': 'CONFIRMED', diff --git a/tests/components/bond/common.py 
b/tests/components/bond/common.py index 6fbcb928b5a..ff1f986583e 100644 --- a/tests/components/bond/common.py +++ b/tests/components/bond/common.py @@ -67,13 +67,9 @@ async def setup_bond_entity( enabled=patch_token ), patch_bond_version(enabled=patch_version), patch_bond_device_ids( enabled=patch_device_ids - ), patch_setup_entry( - "cover", enabled=patch_platforms - ), patch_setup_entry( + ), patch_setup_entry("cover", enabled=patch_platforms), patch_setup_entry( "fan", enabled=patch_platforms - ), patch_setup_entry( - "light", enabled=patch_platforms - ), patch_setup_entry( + ), patch_setup_entry("light", enabled=patch_platforms), patch_setup_entry( "switch", enabled=patch_platforms ): return await hass.config_entries.async_setup(config_entry.entry_id) @@ -102,15 +98,11 @@ async def setup_platform( "homeassistant.components.bond.PLATFORMS", [platform] ), patch_bond_version(return_value=bond_version), patch_bond_bridge( return_value=bridge - ), patch_bond_token( - return_value=token - ), patch_bond_device_ids( + ), patch_bond_token(return_value=token), patch_bond_device_ids( return_value=[bond_device_id] ), patch_start_bpup(), patch_bond_device( return_value=discovered_device - ), patch_bond_device_properties( - return_value=props - ), patch_bond_device_state( + ), patch_bond_device_properties(return_value=props), patch_bond_device_state( return_value=state ): assert await async_setup_component(hass, BOND_DOMAIN, {}) diff --git a/tests/components/bond/test_fan.py b/tests/components/bond/test_fan.py index db1c0fc787d..e202433c8d6 100644 --- a/tests/components/bond/test_fan.py +++ b/tests/components/bond/test_fan.py @@ -26,6 +26,7 @@ from homeassistant.components.fan import ( SERVICE_SET_PERCENTAGE, SERVICE_SET_PRESET_MODE, FanEntityFeature, + NotValidPresetModeError, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -251,10 +252,14 @@ async def test_turn_on_fan_preset_mode_not_supported(hass: HomeAssistant) -> Non props={"max_speed": 6}, ) - with 
patch_bond_action(), patch_bond_device_state(), pytest.raises(ValueError): + with patch_bond_action(), patch_bond_device_state(), pytest.raises( + NotValidPresetModeError + ): await turn_fan_on(hass, "fan.name_1", preset_mode=PRESET_MODE_BREEZE) - with patch_bond_action(), patch_bond_device_state(), pytest.raises(ValueError): + with patch_bond_action(), patch_bond_device_state(), pytest.raises( + NotValidPresetModeError + ): await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, diff --git a/tests/components/bond/test_init.py b/tests/components/bond/test_init.py index 92c11028173..6b462a02c26 100644 --- a/tests/components/bond/test_init.py +++ b/tests/components/bond/test_init.py @@ -184,9 +184,7 @@ async def test_old_identifiers_are_removed( "name": "test1", "type": DeviceType.GENERIC_DEVICE, } - ), patch_bond_device_properties( - return_value={} - ), patch_bond_device_state( + ), patch_bond_device_properties(return_value={}), patch_bond_device_state( return_value={} ): assert await hass.config_entries.async_setup(config_entry.entry_id) is True @@ -228,9 +226,7 @@ async def test_smart_by_bond_device_suggested_area( "type": DeviceType.GENERIC_DEVICE, "location": "Den", } - ), patch_bond_device_properties( - return_value={} - ), patch_bond_device_state( + ), patch_bond_device_properties(return_value={}), patch_bond_device_state( return_value={} ): assert await hass.config_entries.async_setup(config_entry.entry_id) is True @@ -275,9 +271,7 @@ async def test_bridge_device_suggested_area( "type": DeviceType.GENERIC_DEVICE, "location": "Bathroom", } - ), patch_bond_device_properties( - return_value={} - ), patch_bond_device_state( + ), patch_bond_device_properties(return_value={}), patch_bond_device_state( return_value={} ): assert await hass.config_entries.async_setup(config_entry.entry_id) is True diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 44d87745b3f..b7939e4cb50 100644 --- 
a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -38,25 +38,15 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: yield mock_setup -@pytest.fixture -def mock_bsblan_config_flow() -> Generator[None, MagicMock, None]: - """Return a mocked BSBLAN client.""" - with patch( - "homeassistant.components.bsblan.config_flow.BSBLAN", autospec=True - ) as bsblan_mock: - bsblan = bsblan_mock.return_value - bsblan.device.return_value = Device.parse_raw( - load_fixture("device.json", DOMAIN) - ) - bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) - yield bsblan - - @pytest.fixture def mock_bsblan(request: pytest.FixtureRequest) -> Generator[None, MagicMock, None]: """Return a mocked BSBLAN client.""" - with patch("homeassistant.components.bsblan.BSBLAN", autospec=True) as bsblan_mock: + with patch( + "homeassistant.components.bsblan.BSBLAN", autospec=True + ) as bsblan_mock, patch( + "homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock + ): bsblan = bsblan_mock.return_value bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) bsblan.device.return_value = Device.parse_raw( diff --git a/tests/components/bsblan/test_config_flow.py b/tests/components/bsblan/test_config_flow.py index dce881f2f7d..d82c32463d8 100644 --- a/tests/components/bsblan/test_config_flow.py +++ b/tests/components/bsblan/test_config_flow.py @@ -16,7 +16,7 @@ from tests.common import MockConfigEntry async def test_full_user_flow_implementation( hass: HomeAssistant, - mock_bsblan_config_flow: MagicMock, + mock_bsblan: MagicMock, mock_setup_entry: AsyncMock, ) -> None: """Test the full manual user flow from start to finish.""" @@ -52,7 +52,7 @@ async def test_full_user_flow_implementation( assert result2["result"].unique_id == format_mac("00:80:41:19:69:90") assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_bsblan_config_flow.device.mock_calls) == 1 + assert 
len(mock_bsblan.device.mock_calls) == 1 async def test_show_user_form(hass: HomeAssistant) -> None: @@ -68,10 +68,10 @@ async def test_show_user_form(hass: HomeAssistant) -> None: async def test_connection_error( hass: HomeAssistant, - mock_bsblan_config_flow: MagicMock, + mock_bsblan: MagicMock, ) -> None: """Test we show user form on BSBLan connection error.""" - mock_bsblan_config_flow.device.side_effect = BSBLANConnectionError + mock_bsblan.device.side_effect = BSBLANConnectionError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -92,7 +92,7 @@ async def test_connection_error( async def test_user_device_exists_abort( hass: HomeAssistant, - mock_bsblan_config_flow: MagicMock, + mock_bsblan: MagicMock, mock_config_entry: MockConfigEntry, ) -> None: """Test we abort flow if BSBLAN device already configured.""" diff --git a/tests/components/caldav/test_todo.py b/tests/components/caldav/test_todo.py index 352b60d5ed3..6e92f211463 100644 --- a/tests/components/caldav/test_todo.py +++ b/tests/components/caldav/test_todo.py @@ -1,17 +1,23 @@ """The tests for the webdav todo component.""" +from typing import Any from unittest.mock import MagicMock, Mock +from caldav.lib.error import DAVError, NotFoundError from caldav.objects import Todo import pytest +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator CALENDAR_NAME = "My Tasks" ENTITY_NAME = "My tasks" TEST_ENTITY = "todo.my_tasks" +SUPPORTED_FEATURES = 119 TODO_NO_STATUS = """BEGIN:VCALENDAR VERSION:2.0 @@ -34,6 +40,12 @@ STATUS:NEEDS-ACTION END:VTODO END:VCALENDAR""" +RESULT_ITEM = { + "uid": "2", + "summary": "Cheese", + "status": "needs_action", +} + TODO_COMPLETED = """BEGIN:VCALENDAR VERSION:2.0 PRODID:-//E-Corp.//CalDAV Client//EN @@ -63,6 +75,12 @@ def 
platforms() -> list[Platform]: return [Platform.TODO] +@pytest.fixture(autouse=True) +def set_tz(hass: HomeAssistant) -> None: + """Fixture to set timezone with fixed offset year round.""" + hass.config.set_time_zone("America/Regina") + + @pytest.fixture(name="todos") def mock_todos() -> list[str]: """Fixture to return VTODO objects for the calendar.""" @@ -75,17 +93,32 @@ def mock_supported_components() -> list[str]: return ["VTODO"] -@pytest.fixture(name="calendars") -def mock_calendars(todos: list[str], supported_components: list[str]) -> list[Mock]: - """Fixture to create calendars for the test.""" +@pytest.fixture(name="calendar") +def mock_calendar(supported_components: list[str]) -> Mock: + """Fixture to create the primary calendar for the test.""" calendar = Mock() - items = [ - Todo(None, f"{idx}.ics", item, calendar, str(idx)) - for idx, item in enumerate(todos) - ] - calendar.search = MagicMock(return_value=items) + calendar.search = MagicMock(return_value=[]) calendar.name = CALENDAR_NAME calendar.get_supported_components = MagicMock(return_value=supported_components) + return calendar + + +def create_todo(calendar: Mock, idx: str, ics: str) -> Todo: + """Create a caldav Todo object.""" + return Todo(client=None, url=f"{idx}.ics", data=ics, parent=calendar, id=idx) + + +@pytest.fixture(autouse=True) +def mock_search_items(calendar: Mock, todos: list[str]) -> None: + """Fixture to add search results to the test calendar.""" + calendar.search.return_value = [ + create_todo(calendar, str(idx), item) for idx, item in enumerate(todos) + ] + + +@pytest.fixture(name="calendars") +def mock_calendars(calendar: Mock) -> list[Mock]: + """Fixture to create calendars for the test.""" return [calendar] @@ -137,6 +170,7 @@ async def test_todo_list_state( assert state.state == expected_state assert dict(state.attributes) == { "friendly_name": ENTITY_NAME, + "supported_features": SUPPORTED_FEATURES, } @@ -154,3 +188,478 @@ async def test_supported_components( state = 
hass.states.get(TEST_ENTITY) assert (state is not None) == has_entity + + +@pytest.mark.parametrize( + ("item_data", "expcted_save_args", "expected_item"), + [ + ( + {}, + {"status": "NEEDS-ACTION", "summary": "Cheese"}, + RESULT_ITEM, + ), + ( + {"due_date": "2023-11-18"}, + {"status": "NEEDS-ACTION", "summary": "Cheese", "due": "20231118"}, + {**RESULT_ITEM, "due": "2023-11-18"}, + ), + ( + {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {"status": "NEEDS-ACTION", "summary": "Cheese", "due": "20231118T143000Z"}, + {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, + ), + ( + {"description": "Make sure to get Swiss"}, + { + "status": "NEEDS-ACTION", + "summary": "Cheese", + "description": "Make sure to get Swiss", + }, + {**RESULT_ITEM, "description": "Make sure to get Swiss"}, + ), + ], + ids=[ + "summary", + "due_date", + "due_datetime", + "description", + ], +) +async def test_add_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, + item_data: dict[str, Any], + expcted_save_args: dict[str, Any], + expected_item: dict[str, Any], +) -> None: + """Test adding an item to the list.""" + calendar.search.return_value = [] + await config_entry.async_setup(hass) + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "0" + + # Simulate return value for the state update after the service call + calendar.search.return_value = [create_todo(calendar, "2", TODO_NEEDS_ACTION)] + + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + {"item": "Cheese", **item_data}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + assert calendar.save_todo.call_args + assert calendar.save_todo.call_args.kwargs == expcted_save_args + + # Verify state was updated + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "1" + + +async def test_add_item_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + calendar: Mock, +) -> None: + """Test failure when adding an 
item to the list.""" + await config_entry.async_setup(hass) + + calendar.save_todo.side_effect = DAVError() + + with pytest.raises(HomeAssistantError, match="CalDAV save error"): + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("update_data", "expected_ics", "expected_state", "expected_item"), + [ + ( + {"rename": "Swiss Cheese"}, + ["SUMMARY:Swiss Cheese", "STATUS:NEEDS-ACTION"], + "1", + {**RESULT_ITEM, "summary": "Swiss Cheese"}, + ), + ( + {"status": "needs_action"}, + ["SUMMARY:Cheese", "STATUS:NEEDS-ACTION"], + "1", + RESULT_ITEM, + ), + ( + {"status": "completed"}, + ["SUMMARY:Cheese", "STATUS:COMPLETED"], + "0", + {**RESULT_ITEM, "status": "completed"}, + ), + ( + {"rename": "Swiss Cheese", "status": "needs_action"}, + ["SUMMARY:Swiss Cheese", "STATUS:NEEDS-ACTION"], + "1", + {**RESULT_ITEM, "summary": "Swiss Cheese"}, + ), + ( + {"due_date": "2023-11-18"}, + ["SUMMARY:Cheese", "DUE:20231118"], + "1", + {**RESULT_ITEM, "due": "2023-11-18"}, + ), + ( + {"due_datetime": "2023-11-18T08:30:00-06:00"}, + ["SUMMARY:Cheese", "DUE:20231118T143000Z"], + "1", + {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, + ), + ( + {"description": "Make sure to get Swiss"}, + ["SUMMARY:Cheese", "DESCRIPTION:Make sure to get Swiss"], + "1", + {**RESULT_ITEM, "description": "Make sure to get Swiss"}, + ), + ], + ids=[ + "rename", + "status_needs_action", + "status_completed", + "rename_status", + "due_date", + "due_datetime", + "description", + ], +) +async def test_update_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, + update_data: dict[str, Any], + expected_ics: list[str], + expected_state: str, + expected_item: dict[str, Any], +) -> None: + """Test updating an item on the list.""" + + item = Todo(dav_client, None, TODO_NEEDS_ACTION, calendar, "2") + calendar.search = 
MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "1" + + calendar.todo_by_uid = MagicMock(return_value=item) + + dav_client.put.return_value.status = 204 + + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + { + "item": "Cheese", + **update_data, + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + assert dav_client.put.call_args + ics = dav_client.put.call_args.args[1] + for expected in expected_ics: + assert expected in ics + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == expected_state + + result = await hass.services.async_call( + TODO_DOMAIN, + "get_items", + {}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + return_response=True, + ) + assert result == {TEST_ENTITY: {"items": [expected_item]}} + + +async def test_update_item_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, +) -> None: + """Test failure when updating an item on the list.""" + + item = Todo(dav_client, None, TODO_NEEDS_ACTION, calendar, "2") + calendar.search = MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + calendar.todo_by_uid = MagicMock(return_value=item) + dav_client.put.side_effect = DAVError() + + with pytest.raises(HomeAssistantError, match="CalDAV save error"): + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + { + "item": "Cheese", + "status": "completed", + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("side_effect", "match"), + [(DAVError, "CalDAV lookup error"), (NotFoundError, "Could not find")], +) +async def test_update_item_lookup_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, + side_effect: Any, + match: str, +) -> None: + """Test failure when looking up an item to update.""" + + item = Todo(dav_client, 
None, TODO_NEEDS_ACTION, calendar, "2") + calendar.search = MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + calendar.todo_by_uid.side_effect = side_effect + + with pytest.raises(HomeAssistantError, match=match): + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + { + "item": "Cheese", + "status": "completed", + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("uids_to_delete", "expect_item1_delete_called", "expect_item2_delete_called"), + [ + ([], False, False), + (["Cheese"], True, False), + (["Wine"], False, True), + (["Wine", "Cheese"], True, True), + ], + ids=("none", "item1-only", "item2-only", "both-items"), +) +async def test_remove_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, + uids_to_delete: list[str], + expect_item1_delete_called: bool, + expect_item2_delete_called: bool, +) -> None: + """Test removing an item on the list.""" + + item1 = Todo(dav_client, None, TODO_NEEDS_ACTION, calendar, "2") + item2 = Todo(dav_client, None, TODO_COMPLETED, calendar, "3") + calendar.search = MagicMock(return_value=[item1, item2]) + + await config_entry.async_setup(hass) + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "1" + + def lookup(uid: str) -> Mock: + assert uid == "2" or uid == "3" + if uid == "2": + return item1 + return item2 + + calendar.todo_by_uid = Mock(side_effect=lookup) + item1.delete = Mock() + item2.delete = Mock() + + await hass.services.async_call( + TODO_DOMAIN, + "remove_item", + {"item": uids_to_delete}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + assert item1.delete.called == expect_item1_delete_called + assert item2.delete.called == expect_item2_delete_called + + +@pytest.mark.parametrize( + ("todos", "side_effect", "match"), + [ + ([TODO_NEEDS_ACTION], DAVError, "CalDAV lookup error"), + ([TODO_NEEDS_ACTION], NotFoundError, "Could not find"), + ], +) 
+async def test_remove_item_lookup_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + calendar: Mock, + side_effect: Any, + match: str, +) -> None: + """Test failure while removing an item from the list.""" + + await config_entry.async_setup(hass) + + calendar.todo_by_uid.side_effect = side_effect + + with pytest.raises(HomeAssistantError, match=match): + await hass.services.async_call( + TODO_DOMAIN, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +async def test_remove_item_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, +) -> None: + """Test removing an item on the list.""" + + item = Todo(dav_client, "2.ics", TODO_NEEDS_ACTION, calendar, "2") + calendar.search = MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + def lookup(uid: str) -> Mock: + return item + + calendar.todo_by_uid = Mock(side_effect=lookup) + dav_client.delete.return_value.status = 500 + + with pytest.raises(HomeAssistantError, match="CalDAV delete error"): + await hass.services.async_call( + TODO_DOMAIN, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +async def test_remove_item_not_found( + hass: HomeAssistant, + config_entry: MockConfigEntry, + dav_client: Mock, + calendar: Mock, +) -> None: + """Test removing an item on the list.""" + + item = Todo(dav_client, "2.ics", TODO_NEEDS_ACTION, calendar, "2") + calendar.search = MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + def lookup(uid: str) -> Mock: + return item + + calendar.todo_by_uid.side_effect = NotFoundError() + + with pytest.raises(HomeAssistantError, match="Could not find"): + await hass.services.async_call( + TODO_DOMAIN, + "remove_item", + {"item": "Cheese"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + +async def test_subscribe( + hass: HomeAssistant, + config_entry: MockConfigEntry, + 
dav_client: Mock, + calendar: Mock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test subscription to item updates.""" + + item = Todo(dav_client, None, TODO_NEEDS_ACTION, calendar, "2") + calendar.search = MagicMock(return_value=[item]) + + await config_entry.async_setup(hass) + + # Subscribe and get the initial list + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": TEST_ENTITY, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Cheese" + assert items[0]["status"] == "needs_action" + assert items[0]["uid"] + + calendar.todo_by_uid = MagicMock(return_value=item) + dav_client.put.return_value.status = 204 + # Reflect update for state refresh after update + calendar.search.return_value = [ + Todo( + dav_client, None, TODO_NEEDS_ACTION.replace("Cheese", "Milk"), calendar, "2" + ) + ] + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + { + "item": "Cheese", + "rename": "Milk", + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Verify update is published + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Milk" + assert items[0]["status"] == "needs_action" + assert items[0]["uid"] diff --git a/tests/components/calendar/snapshots/test_init.ambr b/tests/components/calendar/snapshots/test_init.ambr index 7d48228193a..67e8839f7a5 100644 --- a/tests/components/calendar/snapshots/test_init.ambr +++ b/tests/components/calendar/snapshots/test_init.ambr @@ -1,11 +1,34 @@ # serializer version: 1 -# name: 
test_list_events_service_duration[calendar.calendar_1-00:15:00] +# name: test_list_events_service_duration[calendar.calendar_1-00:15:00-get_events] + dict({ + 'calendar.calendar_1': dict({ + 'events': list([ + ]), + }), + }) +# --- +# name: test_list_events_service_duration[calendar.calendar_1-00:15:00-list_events] dict({ 'events': list([ ]), }) # --- -# name: test_list_events_service_duration[calendar.calendar_1-01:00:00] +# name: test_list_events_service_duration[calendar.calendar_1-01:00:00-get_events] + dict({ + 'calendar.calendar_1': dict({ + 'events': list([ + dict({ + 'description': 'Future Description', + 'end': '2023-10-19T08:20:05-07:00', + 'location': 'Future Location', + 'start': '2023-10-19T07:20:05-07:00', + 'summary': 'Future Event', + }), + ]), + }), + }) +# --- +# name: test_list_events_service_duration[calendar.calendar_1-01:00:00-list_events] dict({ 'events': list([ dict({ @@ -18,7 +41,20 @@ ]), }) # --- -# name: test_list_events_service_duration[calendar.calendar_2-00:15:00] +# name: test_list_events_service_duration[calendar.calendar_2-00:15:00-get_events] + dict({ + 'calendar.calendar_2': dict({ + 'events': list([ + dict({ + 'end': '2023-10-19T07:20:05-07:00', + 'start': '2023-10-19T06:20:05-07:00', + 'summary': 'Current Event', + }), + ]), + }), + }) +# --- +# name: test_list_events_service_duration[calendar.calendar_2-00:15:00-list_events] dict({ 'events': list([ dict({ diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index ad83d039d73..25804287172 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -12,9 +12,14 @@ from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.bootstrap import async_setup_component -from homeassistant.components.calendar import DOMAIN, SERVICE_LIST_EVENTS +from homeassistant.components.calendar import ( + DOMAIN, + LEGACY_SERVICE_LIST_EVENTS, + SERVICE_GET_EVENTS, +) from homeassistant.core 
import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.issue_registry import IssueRegistry import homeassistant.util.dt as dt_util from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -389,6 +394,41 @@ async def test_create_event_service_invalid_params( @freeze_time("2023-06-22 10:30:00+00:00") +@pytest.mark.parametrize( + ("service", "expected"), + [ + ( + LEGACY_SERVICE_LIST_EVENTS, + { + "events": [ + { + "start": "2023-06-22T05:00:00-06:00", + "end": "2023-06-22T06:00:00-06:00", + "summary": "Future Event", + "description": "Future Description", + "location": "Future Location", + } + ] + }, + ), + ( + SERVICE_GET_EVENTS, + { + "calendar.calendar_1": { + "events": [ + { + "start": "2023-06-22T05:00:00-06:00", + "end": "2023-06-22T06:00:00-06:00", + "summary": "Future Event", + "description": "Future Description", + "location": "Future Location", + } + ] + } + }, + ), + ], +) @pytest.mark.parametrize( ("start_time", "end_time"), [ @@ -402,6 +442,8 @@ async def test_list_events_service( set_time_zone: None, start_time: str, end_time: str, + service: str, + expected: dict[str, Any], ) -> None: """Test listing events from the service call using explicit start and end time. 
@@ -414,8 +456,9 @@ async def test_list_events_service( response = await hass.services.async_call( DOMAIN, - SERVICE_LIST_EVENTS, - { + service, + target={"entity_id": ["calendar.calendar_1"]}, + service_data={ "entity_id": "calendar.calendar_1", "start_date_time": start_time, "end_date_time": end_time, @@ -423,19 +466,16 @@ async def test_list_events_service( blocking=True, return_response=True, ) - assert response == { - "events": [ - { - "start": "2023-06-22T05:00:00-06:00", - "end": "2023-06-22T06:00:00-06:00", - "summary": "Future Event", - "description": "Future Description", - "location": "Future Location", - } - ] - } + assert response == expected +@pytest.mark.parametrize( + ("service"), + [ + (LEGACY_SERVICE_LIST_EVENTS), + SERVICE_GET_EVENTS, + ], +) @pytest.mark.parametrize( ("entity", "duration"), [ @@ -452,6 +492,7 @@ async def test_list_events_service_duration( hass: HomeAssistant, entity: str, duration: str, + service: str, snapshot: SnapshotAssertion, ) -> None: """Test listing events using a time duration.""" @@ -460,7 +501,7 @@ async def test_list_events_service_duration( response = await hass.services.async_call( DOMAIN, - SERVICE_LIST_EVENTS, + service, { "entity_id": entity, "duration": duration, @@ -479,7 +520,7 @@ async def test_list_events_positive_duration(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid, match="should be positive"): await hass.services.async_call( DOMAIN, - SERVICE_LIST_EVENTS, + SERVICE_GET_EVENTS, { "entity_id": "calendar.calendar_1", "duration": "-01:00:00", @@ -499,7 +540,7 @@ async def test_list_events_exclusive_fields(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid, match="at most one of"): await hass.services.async_call( DOMAIN, - SERVICE_LIST_EVENTS, + SERVICE_GET_EVENTS, { "entity_id": "calendar.calendar_1", "end_date_time": end, @@ -518,10 +559,47 @@ async def test_list_events_missing_fields(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid, match="at least one of"): await 
hass.services.async_call( DOMAIN, - SERVICE_LIST_EVENTS, + SERVICE_GET_EVENTS, { "entity_id": "calendar.calendar_1", }, blocking=True, return_response=True, ) + + +async def test_issue_deprecated_service_calendar_list_events( + hass: HomeAssistant, + issue_registry: IssueRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the issue is raised on deprecated service calendar.list_events.""" + + await async_setup_component(hass, "calendar", {"calendar": {"platform": "demo"}}) + await hass.async_block_till_done() + + _ = await hass.services.async_call( + DOMAIN, + LEGACY_SERVICE_LIST_EVENTS, + target={"entity_id": ["calendar.calendar_1"]}, + service_data={ + "entity_id": "calendar.calendar_1", + "duration": "01:00:00", + }, + blocking=True, + return_response=True, + ) + + issue = issue_registry.async_get_issue( + "calendar", "deprecated_service_calendar_list_events" + ) + assert issue + assert issue.issue_domain == "demo" + assert issue.issue_id == "deprecated_service_calendar_list_events" + assert issue.translation_key == "deprecated_service_calendar_list_events" + + assert ( + "Detected use of service 'calendar.list_events'. " + "This is deprecated and will stop working in Home Assistant 2024.6. 
" + "Use 'calendar.get_events' instead which supports multiple entities" + ) in caplog.text diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2d688489d39..9b5c2d56d4c 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -19,7 +19,7 @@ async def test_creating_entry_sets_up_media_player(hass: HomeAssistant) -> None: ) as mock_setup, patch( "pychromecast.discovery.discover_chromecasts", return_value=(True, None) ), patch( - "pychromecast.discovery.stop_discovery" + "pychromecast.discovery.stop_discovery", ): result = await hass.config_entries.flow.async_init( cast.DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py new file mode 100644 index 00000000000..eaf7029d303 --- /dev/null +++ b/tests/components/climate/test_intent.py @@ -0,0 +1,221 @@ +"""Test climate intents.""" +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +from homeassistant.components.climate import ( + DOMAIN, + ClimateEntity, + HVACMode, + intent as climate_intent, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.const import Platform, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers import area_registry as ar, entity_registry as er, intent +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + mock_config_flow, + mock_integration, + mock_platform, +) + +TEST_DOMAIN = "test" + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture(autouse=True) +def config_flow_fixture(hass: HomeAssistant) -> Generator[None, None, None]: + """Mock config flow.""" + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, MockFlow): + yield + + 
+@pytest.fixture(autouse=True) +def mock_setup_integration(hass: HomeAssistant) -> None: + """Fixture to set up a mock integration.""" + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, + config_entry: ConfigEntry, + ) -> bool: + await hass.config_entries.async_unload_platforms(config_entry, [Platform.TODO]) + return True + + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + + +async def create_mock_platform( + hass: HomeAssistant, + entities: list[ClimateEntity], +) -> MockConfigEntry: + """Create a climate platform with the specified entities.""" + + async def async_setup_entry_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities(entities) + + mock_platform( + hass, + f"{TEST_DOMAIN}.{DOMAIN}", + MockPlatform(async_setup_entry=async_setup_entry_platform), + ) + + config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +class MockClimateEntity(ClimateEntity): + """Mock Climate device to use in tests.""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_hvac_mode = HVACMode.OFF + _attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT] + + +async def test_get_temperature( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test HassClimateGetTemperature intent.""" + await climate_intent.async_setup_intents(hass) + 
climate_1 = MockClimateEntity() + climate_1._attr_name = "Climate 1" + climate_1._attr_unique_id = "1234" + climate_1._attr_current_temperature = 10.0 + entity_registry.async_get_or_create( + DOMAIN, "test", "1234", suggested_object_id="climate_1" + ) + + climate_2 = MockClimateEntity() + climate_2._attr_name = "Climate 2" + climate_2._attr_unique_id = "5678" + climate_2._attr_current_temperature = 22.0 + entity_registry.async_get_or_create( + DOMAIN, "test", "5678", suggested_object_id="climate_2" + ) + + await create_mock_platform(hass, [climate_1, climate_2]) + + # Add climate entities to different areas: + # climate_1 => living room + # climate_2 => bedroom + living_room_area = area_registry.async_create(name="Living Room") + bedroom_area = area_registry.async_create(name="Bedroom") + + entity_registry.async_update_entity( + climate_1.entity_id, area_id=living_room_area.id + ) + entity_registry.async_update_entity(climate_2.entity_id, area_id=bedroom_area.id) + + # First climate entity will be selected (no area) + response = await intent.async_handle( + hass, "test", climate_intent.INTENT_GET_TEMPERATURE, {} + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == climate_1.entity_id + state = response.matched_states[0] + assert state.attributes["current_temperature"] == 10.0 + + # Select by area instead (climate_2) + response = await intent.async_handle( + hass, + "test", + climate_intent.INTENT_GET_TEMPERATURE, + {"area": {"value": "Bedroom"}}, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == climate_2.entity_id + state = response.matched_states[0] + assert state.attributes["current_temperature"] == 22.0 + + +async def test_get_temperature_no_entities( + hass: HomeAssistant, +) -> None: + """Test HassClimateGetTemperature intent 
with no climate entities.""" + await climate_intent.async_setup_intents(hass) + + await create_mock_platform(hass, []) + + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, "test", climate_intent.INTENT_GET_TEMPERATURE, {} + ) + + +async def test_get_temperature_no_state( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test HassClimateGetTemperature intent when states are missing.""" + await climate_intent.async_setup_intents(hass) + + climate_1 = MockClimateEntity() + climate_1._attr_name = "Climate 1" + climate_1._attr_unique_id = "1234" + entity_registry.async_get_or_create( + DOMAIN, "test", "1234", suggested_object_id="climate_1" + ) + + await create_mock_platform(hass, [climate_1]) + + living_room_area = area_registry.async_create(name="Living Room") + entity_registry.async_update_entity( + climate_1.entity_id, area_id=living_room_area.id + ) + + with patch("homeassistant.core.StateMachine.get", return_value=None), pytest.raises( + intent.IntentHandleError + ): + await intent.async_handle( + hass, "test", climate_intent.INTENT_GET_TEMPERATURE, {} + ) + + with patch( + "homeassistant.core.StateMachine.async_all", return_value=[] + ), pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + climate_intent.INTENT_GET_TEMPERATURE, + {"area": {"value": "Living Room"}}, + ) diff --git a/tests/components/co2signal/__init__.py b/tests/components/co2signal/__init__.py index 1f3d6a83c05..65764d75fe4 100644 --- a/tests/components/co2signal/__init__.py +++ b/tests/components/co2signal/__init__.py @@ -1,11 +1,18 @@ """Tests for the CO2 Signal integration.""" +from aioelectricitymaps.models import ( + CarbonIntensityData, + CarbonIntensityResponse, + CarbonIntensityUnit, +) -VALID_PAYLOAD = { - "status": "ok", - "countryCode": "FR", - "data": { - "carbonIntensity": 45.98623190095805, - "fossilFuelPercentage": 5.461182741937103, - }, - "units": 
{"carbonIntensity": "gCO2eq/kWh"}, -} +VALID_RESPONSE = CarbonIntensityResponse( + status="ok", + country_code="FR", + data=CarbonIntensityData( + carbon_intensity=45.98623190095805, + fossil_fuel_percentage=5.461182741937103, + ), + units=CarbonIntensityUnit( + carbon_intensity="gCO2eq/kWh", + ), +) diff --git a/tests/components/co2signal/conftest.py b/tests/components/co2signal/conftest.py new file mode 100644 index 00000000000..8eb0116bc88 --- /dev/null +++ b/tests/components/co2signal/conftest.py @@ -0,0 +1,52 @@ +"""Fixtures for Electricity maps integration tests.""" +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.co2signal import DOMAIN +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.components.co2signal import VALID_RESPONSE + + +@pytest.fixture(name="electricity_maps") +def mock_electricity_maps() -> Generator[None, MagicMock, None]: + """Mock the ElectricityMaps client.""" + + with patch( + "homeassistant.components.co2signal.ElectricityMaps", + autospec=True, + ) as electricity_maps, patch( + "homeassistant.components.co2signal.config_flow.ElectricityMaps", + new=electricity_maps, + ): + client = electricity_maps.return_value + client.latest_carbon_intensity_by_coordinates.return_value = VALID_RESPONSE + client.latest_carbon_intensity_by_country_code.return_value = VALID_RESPONSE + + yield client + + +@pytest.fixture(name="config_entry") +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a MockConfigEntry for testing.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_API_KEY: "api_key", "location": ""}, + entry_id="904a74160aa6f335526706bee85dfb83", + ) + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, config_entry: 
MockConfigEntry, electricity_maps: AsyncMock +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index ffb35edfbbb..53a0f000f28 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -19,14 +19,14 @@ 'version': 1, }), 'data': dict({ - 'countryCode': 'FR', + 'country_code': 'FR', 'data': dict({ - 'carbonIntensity': 45.98623190095805, - 'fossilFuelPercentage': 5.461182741937103, + 'carbon_intensity': 45.98623190095805, + 'fossil_fuel_percentage': 5.461182741937103, }), 'status': 'ok', 'units': dict({ - 'carbonIntensity': 'gCO2eq/kWh', + 'carbon_intensity': 'gCO2eq/kWh', }), }), }) diff --git a/tests/components/co2signal/snapshots/test_sensor.ambr b/tests/components/co2signal/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..eb4364ed0d6 --- /dev/null +++ b/tests/components/co2signal/snapshots/test_sensor.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_sensor[sensor.electricity_maps_co2_intensity] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.electricity_maps_co2_intensity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:molecule-co2', + 'original_name': 'CO2 intensity', + 'platform': 'co2signal', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'carbon_intensity', + 'unique_id': 
'904a74160aa6f335526706bee85dfb83_co2intensity', + 'unit_of_measurement': 'gCO2eq/kWh', + }) +# --- +# name: test_sensor[sensor.electricity_maps_co2_intensity].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Electricity Maps', + 'country_code': 'FR', + 'friendly_name': 'Electricity Maps CO2 intensity', + 'icon': 'mdi:molecule-co2', + 'state_class': , + 'unit_of_measurement': 'gCO2eq/kWh', + }), + 'context': , + 'entity_id': 'sensor.electricity_maps_co2_intensity', + 'last_changed': , + 'last_updated': , + 'state': '45.9862319009581', + }) +# --- +# name: test_sensor[sensor.electricity_maps_grid_fossil_fuel_percentage] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.electricity_maps_grid_fossil_fuel_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:molecule-co2', + 'original_name': 'Grid fossil fuel percentage', + 'platform': 'co2signal', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fossil_fuel_percentage', + 'unique_id': '904a74160aa6f335526706bee85dfb83_fossilFuelPercentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.electricity_maps_grid_fossil_fuel_percentage].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Electricity Maps', + 'country_code': 'FR', + 'friendly_name': 'Electricity Maps Grid fossil fuel percentage', + 'icon': 'mdi:molecule-co2', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.electricity_maps_grid_fossil_fuel_percentage', + 'last_changed': , + 'last_updated': , + 'state': '5.4611827419371', + }) +# 
--- diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index 879293ae959..5b1ade1ee49 100644 --- a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -1,17 +1,23 @@ """Test the CO2 Signal config flow.""" -from json import JSONDecodeError -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, patch +from aioelectricitymaps.exceptions import ( + ElectricityMapsDecodeError, + ElectricityMapsError, + InvalidToken, +) import pytest from homeassistant import config_entries from homeassistant.components.co2signal import DOMAIN, config_flow +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import VALID_PAYLOAD +from tests.common import MockConfigEntry +@pytest.mark.usefixtures("electricity_maps") async def test_form_home(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -22,9 +28,6 @@ async def test_form_home(hass: HomeAssistant) -> None: assert result["errors"] is None with patch( - "CO2Signal.get_latest", - return_value=VALID_PAYLOAD, - ), patch( "homeassistant.components.co2signal.async_setup_entry", return_value=True, ) as mock_setup_entry: @@ -45,6 +48,7 @@ async def test_form_home(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("electricity_maps") async def test_form_coordinates(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -64,9 +68,6 @@ async def test_form_coordinates(hass: HomeAssistant) -> None: assert result2["type"] == FlowResultType.FORM with patch( - "CO2Signal.get_latest", - return_value=VALID_PAYLOAD, - ), patch( "homeassistant.components.co2signal.async_setup_entry", return_value=True, ) as mock_setup_entry: @@ -89,6 +90,7 @@ async def test_form_coordinates(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 
+@pytest.mark.usefixtures("electricity_maps") async def test_form_country(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -108,9 +110,6 @@ async def test_form_country(hass: HomeAssistant) -> None: assert result2["type"] == FlowResultType.FORM with patch( - "CO2Signal.get_latest", - return_value=VALID_PAYLOAD, - ), patch( "homeassistant.components.co2signal.async_setup_entry", return_value=True, ) as mock_setup_entry: @@ -135,65 +134,95 @@ async def test_form_country(hass: HomeAssistant) -> None: ("side_effect", "err_code"), [ ( - ValueError("Invalid authentication credentials"), + InvalidToken, "invalid_auth", ), - ( - ValueError("API rate limit exceeded."), - "api_ratelimit", - ), - (ValueError("Something else"), "unknown"), - (JSONDecodeError(msg="boom", doc="", pos=1), "unknown"), - (Exception("Boom"), "unknown"), - (Mock(return_value={"error": "boom"}), "unknown"), - (Mock(return_value={"status": "error"}), "unknown"), + (ElectricityMapsError("Something else"), "unknown"), + (ElectricityMapsDecodeError("Boom"), "unknown"), ], ids=[ "invalid auth", - "rate limit exceeded", - "unknown value error", + "generic error", "json decode error", - "unknown error", - "error in json dict", - "status error", ], ) -async def test_form_error_handling(hass: HomeAssistant, side_effect, err_code) -> None: +async def test_form_error_handling( + hass: HomeAssistant, + electricity_maps: AsyncMock, + side_effect: Exception, + err_code: str, +) -> None: """Test we handle expected errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "CO2Signal.get_latest", - side_effect=side_effect, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "location": config_flow.TYPE_USE_HOME, - "api_key": "api_key", - }, - ) + electricity_maps.latest_carbon_intensity_by_coordinates.side_effect = side_effect + 
electricity_maps.latest_carbon_intensity_by_country_code.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "location": config_flow.TYPE_USE_HOME, + "api_key": "api_key", + }, + ) assert result["type"] == FlowResultType.FORM assert result["errors"] == {"base": err_code} - with patch( - "CO2Signal.get_latest", - return_value=VALID_PAYLOAD, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "location": config_flow.TYPE_USE_HOME, - "api_key": "api_key", - }, - ) - await hass.async_block_till_done() + # reset mock and test if now succeeds + electricity_maps.latest_carbon_intensity_by_coordinates.side_effect = None + electricity_maps.latest_carbon_intensity_by_country_code.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "location": config_flow.TYPE_USE_HOME, + "api_key": "api_key", + }, + ) + await hass.async_block_till_done() assert result["type"] == FlowResultType.CREATE_ENTRY assert result["title"] == "CO2 Signal" assert result["data"] == { "api_key": "api_key", } + + +async def test_reauth( + hass: HomeAssistant, + config_entry: MockConfigEntry, + electricity_maps: AsyncMock, +) -> None: + """Test reauth flow.""" + config_entry.add_to_hass(hass) + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=None, + ) + + assert init_result["type"] == FlowResultType.FORM + assert init_result["step_id"] == "reauth" + + with patch( + "homeassistant.components.co2signal.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + configure_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + { + CONF_API_KEY: "api_key2", + }, + ) + await hass.async_block_till_done() + + assert configure_result["type"] == FlowResultType.ABORT + assert configure_result["reason"] == 
"reauth_successful" + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/co2signal/test_diagnostics.py b/tests/components/co2signal/test_diagnostics.py index ed73cb960b5..edc0007952b 100644 --- a/tests/components/co2signal/test_diagnostics.py +++ b/tests/components/co2signal/test_diagnostics.py @@ -1,35 +1,23 @@ """Test the CO2Signal diagnostics.""" -from unittest.mock import patch +import pytest from syrupy import SnapshotAssertion -from homeassistant.components.co2signal import DOMAIN -from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component - -from . import VALID_PAYLOAD from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.usefixtures("setup_integration") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data={CONF_API_KEY: "api_key", "location": ""}, - entry_id="904a74160aa6f335526706bee85dfb83", - ) - config_entry.add_to_hass(hass) - with patch("CO2Signal.get_latest", return_value=VALID_PAYLOAD): - assert await async_setup_component(hass, DOMAIN, {}) - result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) assert result == snapshot diff --git a/tests/components/co2signal/test_sensor.py b/tests/components/co2signal/test_sensor.py new file mode 100644 index 00000000000..b79c8e04c23 --- /dev/null +++ b/tests/components/co2signal/test_sensor.py @@ -0,0 +1,105 @@ +"""Tests Electricity Maps sensor platform.""" +from datetime import timedelta +from unittest.mock import AsyncMock + +from aioelectricitymaps.exceptions import ( + ElectricityMapsDecodeError, + ElectricityMapsError, + InvalidToken, +) +from 
freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import async_fire_time_changed + + +@pytest.mark.parametrize( + "entity_name", + [ + "sensor.electricity_maps_co2_intensity", + "sensor.electricity_maps_grid_fossil_fuel_percentage", + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entity_name: str, + snapshot: SnapshotAssertion, +) -> None: + """Test sensor setup and update.""" + assert (entry := entity_registry.async_get(entity_name)) + assert entry == snapshot + + assert (state := hass.states.get(entity_name)) + assert state == snapshot + + +@pytest.mark.parametrize( + "error", + [ + ElectricityMapsDecodeError, + ElectricityMapsError, + Exception, + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_sensor_update_fail( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + electricity_maps: AsyncMock, + error: Exception, +) -> None: + """Test sensor error handling.""" + assert (state := hass.states.get("sensor.electricity_maps_co2_intensity")) + assert state.state == "45.9862319009581" + assert len(electricity_maps.mock_calls) == 1 + + electricity_maps.latest_carbon_intensity_by_coordinates.side_effect = error + electricity_maps.latest_carbon_intensity_by_country_code.side_effect = error + + freezer.tick(timedelta(minutes=20)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("sensor.electricity_maps_co2_intensity")) + assert state.state == "unavailable" + assert len(electricity_maps.mock_calls) == 2 + + # reset mock and test if entity is available again + electricity_maps.latest_carbon_intensity_by_coordinates.side_effect = None + electricity_maps.latest_carbon_intensity_by_country_code.side_effect = None + + 
freezer.tick(timedelta(minutes=20)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("sensor.electricity_maps_co2_intensity")) + assert state.state == "45.9862319009581" + assert len(electricity_maps.mock_calls) == 3 + + +@pytest.mark.usefixtures("setup_integration") +async def test_sensor_reauth_triggered( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + electricity_maps: AsyncMock, +): + """Test if reauth flow is triggered.""" + assert (state := hass.states.get("sensor.electricity_maps_co2_intensity")) + assert state.state == "45.9862319009581" + + electricity_maps.latest_carbon_intensity_by_coordinates.side_effect = InvalidToken + electricity_maps.latest_carbon_intensity_by_country_code.side_effect = InvalidToken + + freezer.tick(timedelta(minutes=20)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (flows := hass.config_entries.flow.async_progress()) + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth" diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 6ab33f3bc7c..0f8930dbeff 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -6,7 +6,12 @@ from homeassistant.components.coinbase.const import ( ) from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN -from .const import GOOD_EXCHANGE_RATE, GOOD_EXCHANGE_RATE_2, MOCK_ACCOUNTS_RESPONSE +from .const import ( + GOOD_CURRENCY_2, + GOOD_EXCHANGE_RATE, + GOOD_EXCHANGE_RATE_2, + MOCK_ACCOUNTS_RESPONSE, +) from tests.common import MockConfigEntry @@ -60,7 +65,11 @@ def mock_get_exchange_rates(): """Return a heavily reduced mock list of exchange rates for testing.""" return { "currency": "USD", - "rates": {GOOD_EXCHANGE_RATE_2: "0.109", GOOD_EXCHANGE_RATE: "0.00002"}, + "rates": { + GOOD_CURRENCY_2: "1.0", + GOOD_EXCHANGE_RATE_2: "0.109", + GOOD_EXCHANGE_RATE: "0.00002", + }, } diff --git 
a/tests/components/coinbase/const.py b/tests/components/coinbase/const.py index 2b437e15478..138b941c62c 100644 --- a/tests/components/coinbase/const.py +++ b/tests/components/coinbase/const.py @@ -12,26 +12,23 @@ BAD_EXCHANGE_RATE = "ETH" MOCK_ACCOUNTS_RESPONSE = [ { "balance": {"amount": "0.00001", "currency": GOOD_CURRENCY}, - "currency": GOOD_CURRENCY, + "currency": {"code": GOOD_CURRENCY}, "id": "123456789", "name": "BTC Wallet", - "native_balance": {"amount": "100.12", "currency": GOOD_CURRENCY_2}, "type": "wallet", }, { "balance": {"amount": "100.00", "currency": GOOD_CURRENCY}, - "currency": GOOD_CURRENCY, + "currency": {"code": GOOD_CURRENCY}, "id": "abcdefg", "name": "BTC Vault", - "native_balance": {"amount": "100.12", "currency": GOOD_CURRENCY_2}, "type": "vault", }, { "balance": {"amount": "9.90", "currency": GOOD_CURRENCY_2}, - "currency": "USD", + "currency": {"code": GOOD_CURRENCY_2}, "id": "987654321", "name": "USD Wallet", - "native_balance": {"amount": "9.90", "currency": GOOD_CURRENCY_2}, "type": "fiat", }, ] diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index c214330d5f9..38224a9992f 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -7,13 +7,11 @@ 'amount': '**REDACTED**', 'currency': 'BTC', }), - 'currency': 'BTC', + 'currency': dict({ + 'code': 'BTC', + }), 'id': '**REDACTED**', 'name': 'BTC Wallet', - 'native_balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', - }), 'type': 'wallet', }), dict({ @@ -21,13 +19,11 @@ 'amount': '**REDACTED**', 'currency': 'BTC', }), - 'currency': 'BTC', + 'currency': dict({ + 'code': 'BTC', + }), 'id': '**REDACTED**', 'name': 'BTC Vault', - 'native_balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', - }), 'type': 'vault', }), dict({ @@ -35,13 +31,11 @@ 'amount': '**REDACTED**', 'currency': 'USD', }), - 'currency': 
'USD', + 'currency': dict({ + 'code': 'USD', + }), 'id': '**REDACTED**', 'name': 'USD Wallet', - 'native_balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', - }), 'type': 'fiat', }), ]), diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index f2d59f46114..dd15eca05cd 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -24,7 +24,7 @@ async def test_user(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.comelit.async_setup_entry" ) as mock_setup_entry, patch( - "requests.get" + "requests.get", ) as mock_request_get: mock_request_get.return_value.status_code = 200 @@ -70,7 +70,7 @@ async def test_exception_connection(hass: HomeAssistant, side_effect, error) -> ), patch( "aiocomelit.api.ComeliteSerialBridgeApi.logout", ), patch( - "homeassistant.components.comelit.async_setup_entry" + "homeassistant.components.comelit.async_setup_entry", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_USER_DATA @@ -135,9 +135,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> "aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect ), patch( "aiocomelit.api.ComeliteSerialBridgeApi.logout", - ), patch( - "homeassistant.components.comelit.async_setup_entry" - ): + ), patch("homeassistant.components.comelit.async_setup_entry"): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, diff --git a/tests/components/config/test_automation.py b/tests/components/config/test_automation.py index ad4c7e90851..1a099c05b16 100644 --- a/tests/components/config/test_automation.py +++ b/tests/components/config/test_automation.py @@ -23,7 +23,9 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture async def setup_automation( - hass, automation_config, 
stub_blueprint_populate # noqa: F811 + hass, + automation_config, + stub_blueprint_populate, # noqa: F811 ): """Set up automation integration.""" assert await async_setup_component( diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 3cc7ada49ba..bfee7551cff 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -19,8 +19,8 @@ from tests.common import ( MockConfigEntry, MockModule, MockUser, - mock_entity_platform, mock_integration, + mock_platform, ) from tests.typing import WebSocketGenerator @@ -304,7 +304,7 @@ async def test_reload_entry_in_setup_retry( async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) entry = MockConfigEntry(domain="comp", state=core_ce.ConfigEntryState.SETUP_RETRY) entry.supports_unload = True entry.add_to_hass(hass) @@ -353,7 +353,7 @@ async def test_available_flows( async def test_initialize_flow(hass: HomeAssistant, client) -> None: """Test we can initialize a flow.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): async def async_step_user(self, user_input=None): @@ -402,7 +402,7 @@ async def test_initialize_flow(hass: HomeAssistant, client) -> None: async def test_initialize_flow_unmet_dependency(hass: HomeAssistant, client) -> None: """Test unmet dependencies are listed.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) config_schema = vol.Schema({"comp_conf": {"hello": str}}, required=True) mock_integration( @@ -458,7 +458,7 @@ async def test_initialize_flow_unauth( async def test_abort(hass: HomeAssistant, client) -> None: """Test a flow that aborts.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class 
TestFlow(core_ce.ConfigFlow): async def async_step_user(self, user_input=None): @@ -484,7 +484,7 @@ async def test_create_account( hass: HomeAssistant, client, enable_custom_integrations: None ) -> None: """Test a flow that creates an account.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) @@ -542,7 +542,7 @@ async def test_two_step_flow( mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): VERSION = 1 @@ -619,7 +619,7 @@ async def test_continue_flow_unauth( mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): VERSION = 1 @@ -666,7 +666,7 @@ async def test_get_progress_index( ) -> None: """Test querying for the flows that are in progress.""" assert await async_setup_component(hass, "config", {}) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) ws_client = await hass_ws_client(hass) class TestFlow(core_ce.ConfigFlow): @@ -714,7 +714,7 @@ async def test_get_progress_index_unauth( async def test_get_progress_flow(hass: HomeAssistant, client) -> None: """Test we can query the API for same result as we get from init a flow.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): async def async_step_user(self, user_input=None): @@ -750,7 +750,7 @@ async def test_get_progress_flow_unauth( hass: HomeAssistant, client, hass_admin_user: MockUser ) -> None: """Test we can can't query the API for result of flow.""" - 
mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): async def async_step_user(self, user_input=None): @@ -804,7 +804,7 @@ async def test_options_flow(hass: HomeAssistant, client) -> None: return OptionsFlowHandler() mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) MockConfigEntry( domain="test", entry_id="test1", @@ -862,7 +862,7 @@ async def test_options_flow_unauth( return OptionsFlowHandler() mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) MockConfigEntry( domain="test", entry_id="test1", @@ -883,7 +883,7 @@ async def test_two_step_options_flow(hass: HomeAssistant, client) -> None: mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): @staticmethod @@ -950,7 +950,7 @@ async def test_options_flow_with_invalid_data(hass: HomeAssistant, client) -> No mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): @staticmethod @@ -1265,7 +1265,7 @@ async def test_ignore_flow( mock_integration( hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(core_ce.ConfigFlow): VERSION = 1 diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py index 87bb9cc9409..4a784a6eff1 100644 --- a/tests/components/config/test_device_registry.py +++ 
b/tests/components/config/test_device_registry.py @@ -242,7 +242,7 @@ async def test_remove_config_entry_from_device( response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" # Make async_remove_config_entry_device return True can_remove = True @@ -365,7 +365,7 @@ async def test_remove_config_entry_from_device_fails( response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Unknown config entry" # Try removing a config entry which does not support removal from the device @@ -380,7 +380,7 @@ async def test_remove_config_entry_from_device_fails( response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert ( response["error"]["message"] == "Config entry does not support device removal" ) @@ -397,7 +397,7 @@ async def test_remove_config_entry_from_device_fails( response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Unknown device" # Try removing a config entry from a device which it's not connected to @@ -428,7 +428,7 @@ async def test_remove_config_entry_from_device_fails( response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Config entry not in device" # Try removing a config entry which can't be loaded from a device - allowed @@ -443,5 +443,5 @@ async def test_remove_config_entry_from_device_fails( 
response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Integration not found" diff --git a/tests/components/conftest.py b/tests/components/conftest.py index c985565b1be..1ebcc864b4b 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -16,7 +16,7 @@ def patch_zeroconf_multiple_catcher() -> Generator[None, None, None]: yield -@pytest.fixture(autouse=True) +@pytest.fixture(scope="session", autouse=True) def prevent_io() -> Generator[None, None, None]: """Fixture to prevent certain I/O from happening.""" with patch( diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index c75c96ca59b..fe94e2d5425 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -1,4 +1,5 @@ """Test for the default agent.""" +from collections import defaultdict from unittest.mock import AsyncMock, patch import pytest @@ -293,3 +294,124 @@ async def test_nevermind_item(hass: HomeAssistant, init_components) -> None: assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert not result.response.speech + + +async def test_device_area_context( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test that including a device_id will target a specific area.""" + turn_on_calls = async_mock_service(hass, "light", "turn_on") + turn_off_calls = async_mock_service(hass, "light", "turn_off") + + area_kitchen = area_registry.async_get_or_create("kitchen") + area_bedroom = area_registry.async_get_or_create("bedroom") + + # Create 2 lights in each area + area_lights = defaultdict(list) + for area in (area_kitchen, area_bedroom): + for i in 
range(2): + light_entity = entity_registry.async_get_or_create( + "light", "demo", f"{area.name}-light-{i}" + ) + entity_registry.async_update_entity(light_entity.entity_id, area_id=area.id) + hass.states.async_set( + light_entity.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: f"{area.name} light {i}"}, + ) + area_lights[area.name].append(light_entity) + + # Create voice satellites in each area + entry = MockConfigEntry() + entry.add_to_hass(hass) + + kitchen_satellite = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "id-satellite-kitchen")}, + ) + device_registry.async_update_device(kitchen_satellite.id, area_id=area_kitchen.id) + + bedroom_satellite = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "id-satellite-bedroom")}, + ) + device_registry.async_update_device(bedroom_satellite.id, area_id=area_bedroom.id) + + # Turn on lights in the area of a device + result = await conversation.async_converse( + hass, + "turn on the lights", + None, + Context(), + None, + device_id=kitchen_satellite.id, + ) + await hass.async_block_till_done() + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + + # Verify only kitchen lights were targeted + assert {s.entity_id for s in result.response.matched_states} == { + e.entity_id for e in area_lights["kitchen"] + } + assert {c.data["entity_id"][0] for c in turn_on_calls} == { + e.entity_id for e in area_lights["kitchen"] + } + turn_on_calls.clear() + + # Ensure we can still target other areas by name + result = await conversation.async_converse( + hass, + "turn on lights in the bedroom", + None, + Context(), + None, + device_id=kitchen_satellite.id, + ) + await hass.async_block_till_done() + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + + # Verify only bedroom lights were targeted + assert {s.entity_id for s in 
result.response.matched_states} == { + e.entity_id for e in area_lights["bedroom"] + } + assert {c.data["entity_id"][0] for c in turn_on_calls} == { + e.entity_id for e in area_lights["bedroom"] + } + turn_on_calls.clear() + + # Turn off all lights in the area of the other device + result = await conversation.async_converse( + hass, + "turn lights off", + None, + Context(), + None, + device_id=bedroom_satellite.id, + ) + await hass.async_block_till_done() + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + + # Verify only bedroom lights were targeted + assert {s.entity_id for s in result.response.matched_states} == { + e.entity_id for e in area_lights["bedroom"] + } + assert {c.data["entity_id"][0] for c in turn_off_calls} == { + e.entity_id for e in area_lights["bedroom"] + } + turn_off_calls.clear() + + # Not providing a device id should not match + for command in ("on", "off"): + result = await conversation.async_converse( + hass, f"turn {command} all lights", None, Context(), None + ) + assert result.response.response_type == intent.IntentResponseType.ERROR + assert ( + result.response.error_code == intent.IntentResponseErrorCode.NO_INTENT_MATCH + ) diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index 7fa93266aef..38d68d135b6 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -530,6 +530,55 @@ TEST_DATA = [ "next_state": "1.3", }, ), + ( # Particulate matter -> pm2_5 + { + "capabilities": { + "measured_value": { + "max": 999, + "min": 0, + "quantity": "density", + "substance": "PM2.5", + "unit": "ug/m^3", + } + }, + "config": {"on": True, "reachable": True}, + "ep": 1, + "etag": "2a67a4b5cbcc20532c0ee75e2abac0c3", + "lastannounced": None, + "lastseen": "2023-10-29T12:59Z", + "manufacturername": "IKEA of Sweden", + "modelid": "STARKVIND Air purifier table", + "name": "STARKVIND AirPurifier", + "productid": "E2006", + "state": {
"airquality": "excellent", + "lastupdated": "2023-10-29T12:59:27.976", + "measured_value": 1, + "pm2_5": 1, + }, + "swversion": "1.1.001", + "type": "ZHAParticulateMatter", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a", + }, + { + "entity_count": 1, + "device_count": 3, + "entity_id": "sensor.starkvind_airpurifier_pm25", + "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5", + "state": "1", + "entity_category": None, + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "attributes": { + "friendly_name": "STARKVIND AirPurifier PM25", + "device_class": SensorDeviceClass.PM25, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + }, + "websocket_event": {"state": {"measured_value": 2}}, + "next_state": "2", + }, + ), ( # Power sensor { "config": { diff --git a/tests/components/demo/test_fan.py b/tests/components/demo/test_fan.py index 58a8c99ea3c..a3f607aee76 100644 --- a/tests/components/demo/test_fan.py +++ b/tests/components/demo/test_fan.py @@ -182,7 +182,7 @@ async def test_turn_on_with_preset_mode_only( assert state.state == STATE_OFF assert state.attributes[fan.ATTR_PRESET_MODE] is None - with pytest.raises(ValueError): + with pytest.raises(fan.NotValidPresetModeError) as exc: await hass.services.async_call( fan.DOMAIN, SERVICE_TURN_ON, @@ -190,6 +190,12 @@ async def test_turn_on_with_preset_mode_only( blocking=True, ) await hass.async_block_till_done() + assert exc.value.translation_domain == fan.DOMAIN + assert exc.value.translation_key == "not_valid_preset_mode" + assert exc.value.translation_placeholders == { + "preset_mode": "invalid", + "preset_modes": "auto, smart, sleep, on", + } state = hass.states.get(fan_entity_id) assert state.state == STATE_OFF @@ -250,7 +256,7 @@ async def test_turn_on_with_preset_mode_and_speed( assert state.attributes[fan.ATTR_PERCENTAGE] == 0 assert state.attributes[fan.ATTR_PRESET_MODE] is None - with 
pytest.raises(ValueError): + with pytest.raises(fan.NotValidPresetModeError) as exc: await hass.services.async_call( fan.DOMAIN, SERVICE_TURN_ON, @@ -258,6 +264,12 @@ async def test_turn_on_with_preset_mode_and_speed( blocking=True, ) await hass.async_block_till_done() + assert exc.value.translation_domain == fan.DOMAIN + assert exc.value.translation_key == "not_valid_preset_mode" + assert exc.value.translation_placeholders == { + "preset_mode": "invalid", + "preset_modes": "auto, smart, sleep, on", + } state = hass.states.get(fan_entity_id) assert state.state == STATE_OFF @@ -343,7 +355,7 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant, fan_entity_id) -> No state = hass.states.get(fan_entity_id) assert state.state == STATE_OFF - with pytest.raises(ValueError): + with pytest.raises(fan.NotValidPresetModeError) as exc: await hass.services.async_call( fan.DOMAIN, fan.SERVICE_SET_PRESET_MODE, @@ -351,8 +363,10 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant, fan_entity_id) -> No blocking=True, ) await hass.async_block_till_done() + assert exc.value.translation_domain == fan.DOMAIN + assert exc.value.translation_key == "not_valid_preset_mode" - with pytest.raises(ValueError): + with pytest.raises(fan.NotValidPresetModeError) as exc: await hass.services.async_call( fan.DOMAIN, SERVICE_TURN_ON, @@ -360,6 +374,8 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant, fan_entity_id) -> No blocking=True, ) await hass.async_block_till_done() + assert exc.value.translation_domain == fan.DOMAIN + assert exc.value.translation_key == "not_valid_preset_mode" @pytest.mark.parametrize("fan_entity_id", FULL_FAN_ENTITY_IDS) diff --git a/tests/components/denonavr/test_config_flow.py b/tests/components/denonavr/test_config_flow.py index 93a6305655b..a0fb908d920 100644 --- a/tests/components/denonavr/test_config_flow.py +++ b/tests/components/denonavr/test_config_flow.py @@ -65,7 +65,8 @@ def denonavr_connect_fixture(): 
"homeassistant.components.denonavr.receiver.DenonAVR.receiver_type", TEST_RECEIVER_TYPE, ), patch( - "homeassistant.components.denonavr.async_setup_entry", return_value=True + "homeassistant.components.denonavr.async_setup_entry", + return_value=True, ): yield diff --git a/tests/components/devialet/__init__.py b/tests/components/devialet/__init__.py new file mode 100644 index 00000000000..28ab6229c44 --- /dev/null +++ b/tests/components/devialet/__init__.py @@ -0,0 +1,150 @@ +"""Tests for the Devialet integration.""" + +from ipaddress import ip_address + +from aiohttp import ClientError as ServerTimeoutError +from devialet.const import UrlSuffix + +from homeassistant.components import zeroconf +from homeassistant.components.devialet.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_NAME, CONTENT_TYPE_JSON +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker + +NAME = "Livingroom" +SERIAL = "L00P00000AB11" +HOST = "127.0.0.1" +CONF_INPUT = {CONF_HOST: HOST} + +CONF_DATA = { + CONF_HOST: HOST, + CONF_NAME: NAME, +} + +MOCK_CONFIG = {DOMAIN: [{CONF_HOST: HOST}]} +MOCK_USER_INPUT = {CONF_HOST: HOST} +MOCK_ZEROCONF_DATA = zeroconf.ZeroconfServiceInfo( + ip_address=ip_address(HOST), + ip_addresses=[ip_address(HOST)], + hostname="PhantomISilver-L00P00000AB11.local.", + type="_devialet-http._tcp.", + name="Livingroom", + port=80, + properties={ + "_raw": { + "firmwareFamily": "DOS", + "firmwareVersion": "2.16.1.49152", + "ipControlVersion": "1", + "manufacturer": "Devialet", + "model": "Phantom I Silver", + "path": "/ipcontrol/v1", + "serialNumber": "L00P00000AB11", + }, + "firmwareFamily": "DOS", + "firmwareVersion": "2.16.1.49152", + "ipControlVersion": "1", + "manufacturer": "Devialet", + "model": "Phantom I Silver", + "path": "/ipcontrol/v1", + "serialNumber": "L00P00000AB11", + }, +) + + +def mock_unavailable(aioclient_mock: 
AiohttpClientMocker) -> None: + """Mock the Devialet connection for Home Assistant.""" + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_GENERAL_INFO}", exc=ServerTimeoutError + ) + + +def mock_idle(aioclient_mock: AiohttpClientMocker) -> None: + """Mock the Devialet connection for Home Assistant.""" + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_GENERAL_INFO}", + text=load_fixture("general_info.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_CURRENT_SOURCE}", + exc=ServerTimeoutError, + ) + + +def mock_playing(aioclient_mock: AiohttpClientMocker) -> None: + """Mock the Devialet connection for Home Assistant.""" + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_GENERAL_INFO}", + text=load_fixture("general_info.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_CURRENT_SOURCE}", + text=load_fixture("source_state.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_SOURCES}", + text=load_fixture("sources.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_VOLUME}", + text=load_fixture("volume.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_NIGHT_MODE}", + text=load_fixture("night_mode.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_EQUALIZER}", + text=load_fixture("equalizer.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_CURRENT_POSITION}", + text=load_fixture("current_position.json", DOMAIN), + headers={"Content-Type": CONTENT_TYPE_JSON}, + ) + + +async def setup_integration( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + 
skip_entry_setup: bool = False, + state: str = "playing", + serial: str = SERIAL, +) -> MockConfigEntry: + """Set up the Devialet integration in Home Assistant.""" + + if state == "playing": + mock_playing(aioclient_mock) + elif state == "unavailable": + mock_unavailable(aioclient_mock) + elif state == "idle": + mock_idle(aioclient_mock) + + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=serial, + data=CONF_DATA, + ) + + entry.add_to_hass(hass) + + if not skip_entry_setup: + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return entry diff --git a/tests/components/devialet/fixtures/current_position.json b/tests/components/devialet/fixtures/current_position.json new file mode 100644 index 00000000000..2b9761cc03a --- /dev/null +++ b/tests/components/devialet/fixtures/current_position.json @@ -0,0 +1,3 @@ +{ + "position": 123102 +} diff --git a/tests/components/devialet/fixtures/equalizer.json b/tests/components/devialet/fixtures/equalizer.json new file mode 100644 index 00000000000..be9ea651d6e --- /dev/null +++ b/tests/components/devialet/fixtures/equalizer.json @@ -0,0 +1,26 @@ +{ + "availablePresets": ["custom", "flat", "voice"], + "currentEqualization": { + "high": { + "gain": 0 + }, + "low": { + "gain": 0 + } + }, + "customEqualization": { + "high": { + "gain": 0 + }, + "low": { + "gain": 0 + } + }, + "enabled": true, + "gainRange": { + "max": 6, + "min": -6, + "stepPrecision": 1 + }, + "preset": "flat" +} diff --git a/tests/components/devialet/fixtures/general_info.json b/tests/components/devialet/fixtures/general_info.json new file mode 100644 index 00000000000..6ff1a724f08 --- /dev/null +++ b/tests/components/devialet/fixtures/general_info.json @@ -0,0 +1,18 @@ +{ + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "deviceName": "Livingroom", + "firmwareFamily": "DOS", + "groupId": "12345678-901a-2b3c-def4-567g89h0i12j", + "ipControlVersion": "1", + "model": "Phantom I Silver", + "release": { + 
"buildType": "release", + "canonicalVersion": "2.16.1.49152", + "version": "2.16.1" + }, + "role": "FrontLeft", + "serial": "L00P00000AB11", + "standbyEntryDelay": 0, + "standbyState": "Unknown", + "systemId": "a12b345c-67d8-90e1-12f4-g5hij67890kl" +} diff --git a/tests/components/devialet/fixtures/night_mode.json b/tests/components/devialet/fixtures/night_mode.json new file mode 100644 index 00000000000..e61cc12151d --- /dev/null +++ b/tests/components/devialet/fixtures/night_mode.json @@ -0,0 +1,3 @@ +{ + "nightMode": "off" +} diff --git a/tests/components/devialet/fixtures/no_current_source.json b/tests/components/devialet/fixtures/no_current_source.json new file mode 100644 index 00000000000..ac16468597d --- /dev/null +++ b/tests/components/devialet/fixtures/no_current_source.json @@ -0,0 +1,7 @@ +{ + "error": { + "code": "NoCurrentSource", + "details": {}, + "message": "" + } +} diff --git a/tests/components/devialet/fixtures/source_state.json b/tests/components/devialet/fixtures/source_state.json new file mode 100644 index 00000000000..d389675ac98 --- /dev/null +++ b/tests/components/devialet/fixtures/source_state.json @@ -0,0 +1,20 @@ +{ + "availableOptions": ["play", "pause", "previous", "next", "seek"], + "metadata": { + "album": "1 (Remastered)", + "artist": "The Beatles", + "coverArtDataPresent": false, + "coverArtUrl": "https://i.scdn.co/image/ab67616d0000b273582d56ce20fe0146ffa0e5cf", + "duration": 425653, + "mediaType": "unknown", + "title": "Hey Jude - Remastered 2015" + }, + "muteState": "unmuted", + "peerDeviceName": "", + "playingState": "playing", + "source": { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "7b0d8ed0-5650-45cd-841b-647b78730bfb", + "type": "spotifyconnect" + } +} diff --git a/tests/components/devialet/fixtures/sources.json b/tests/components/devialet/fixtures/sources.json new file mode 100644 index 00000000000..5f484314d73 --- /dev/null +++ b/tests/components/devialet/fixtures/sources.json @@ -0,0 +1,41 @@ +{ 
+ "sources": [ + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "7b0d8ed0-5650-45cd-841b-647b78730bfb", + "type": "spotifyconnect" + }, + { + "deviceId": "9abc87d6-ef54-321d-0g9h-ijk876l54m32", + "sourceId": "12708064-01fa-4e25-a0f1-f94b3de49baa", + "streamLockAvailable": false, + "type": "optical" + }, + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "82834351-8255-4e2e-9ce2-b7d4da0aa3b0", + "streamLockAvailable": false, + "type": "optical" + }, + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "07b1bf6d-9216-4a7b-8d53-5590cee21d90", + "type": "upnp" + }, + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "1015e17d-d515-419d-a47b-4a7252bff838", + "type": "airplay2" + }, + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "88186c24-f896-4ef0-a731-a6c8f8f01908", + "type": "bluetooth" + }, + { + "deviceId": "1abcdef2-3456-67g8-9h0i-1jk23456lm78", + "sourceId": "acfd9fe6-7e29-4c2b-b2bd-5083486a5291", + "type": "raat" + } + ] +} diff --git a/tests/components/devialet/fixtures/system_info.json b/tests/components/devialet/fixtures/system_info.json new file mode 100644 index 00000000000..f496e5557d2 --- /dev/null +++ b/tests/components/devialet/fixtures/system_info.json @@ -0,0 +1,6 @@ +{ + "availableFeatures": ["nightMode", "equalizer", "balance"], + "groupId": "12345678-901a-2b3c-def4-567g89h0i12j", + "systemId": "a12b345c-67d8-90e1-12f4-g5hij67890kl", + "systemName": "Devialet" +} diff --git a/tests/components/devialet/fixtures/volume.json b/tests/components/devialet/fixtures/volume.json new file mode 100644 index 00000000000..365d5ed776d --- /dev/null +++ b/tests/components/devialet/fixtures/volume.json @@ -0,0 +1,3 @@ +{ + "volume": 20 +} diff --git a/tests/components/devialet/test_config_flow.py b/tests/components/devialet/test_config_flow.py new file mode 100644 index 00000000000..0bacc558b74 --- /dev/null +++ b/tests/components/devialet/test_config_flow.py 
@@ -0,0 +1,154 @@ +"""Test the Devialet config flow.""" +from unittest.mock import patch + +from aiohttp import ClientError as HTTPClientError +from devialet.const import UrlSuffix + +from homeassistant.components.devialet.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . import ( + HOST, + MOCK_USER_INPUT, + MOCK_ZEROCONF_DATA, + NAME, + mock_playing, + setup_integration, +) + +from tests.test_util.aiohttp import AiohttpClientMocker + + +async def test_show_user_form(hass: HomeAssistant) -> None: + """Test that the user set up form is served.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) + + assert result["step_id"] == "user" + assert result["type"] == FlowResultType.FORM + + +async def test_cannot_connect( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test we show user form on connection error.""" + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_GENERAL_INFO}", exc=HTTPClientError + ) + + user_input = MOCK_USER_INPUT.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + data=user_input, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + +async def test_user_device_exists_abort( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test we abort user flow if Devialet device already configured.""" + await setup_integration(hass, aioclient_mock, skip_entry_setup=True) + + user_input = MOCK_USER_INPUT.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + data=user_input, + ) + + assert result["type"] ==
FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_full_user_flow_implementation( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the full manual user flow from start to finish.""" + mock_playing(aioclient_mock) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_USER_INPUT.copy() + with patch( + "homeassistant.components.devialet.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + + assert result["data"] + assert result["data"][CONF_HOST] == HOST + + +async def test_zeroconf_devialet( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test we pass Devialet devices to the discovery manager.""" + mock_playing(aioclient_mock) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + + assert result["type"] == "form" + + with patch( + "homeassistant.components.devialet.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + await hass.async_block_till_done() + + assert result2["type"] == "create_entry" + assert result2["title"] == "Livingroom" + assert result2["data"] == { + CONF_HOST: HOST, + CONF_NAME: NAME, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_async_step_confirm( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test starting a flow from discovery.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + 
assert result["type"] == "form" + assert result["step_id"] == "confirm" + + aioclient_mock.get( + f"http://{HOST}{UrlSuffix.GET_GENERAL_INFO}", exc=HTTPClientError + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=MOCK_USER_INPUT.copy() + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/devialet/test_diagnostics.py b/tests/components/devialet/test_diagnostics.py new file mode 100644 index 00000000000..82600de7cf5 --- /dev/null +++ b/tests/components/devialet/test_diagnostics.py @@ -0,0 +1,40 @@ +"""Test the Devialet diagnostics.""" +import json + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import load_fixture +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test diagnostics.""" + entry = await setup_integration(hass, aioclient_mock) + + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == { + "is_available": True, + "general_info": json.loads(load_fixture("general_info.json", "devialet")), + "sources": json.loads(load_fixture("sources.json", "devialet")), + "source_state": json.loads(load_fixture("source_state.json", "devialet")), + "volume": json.loads(load_fixture("volume.json", "devialet")), + "night_mode": json.loads(load_fixture("night_mode.json", "devialet")), + "equalizer": json.loads(load_fixture("equalizer.json", "devialet")), + "source_list": [ + "Airplay", + "Bluetooth", + "Online", + "Optical left", + "Optical right", + "Raat", + "Spotify Connect", + ], + "source": "spotifyconnect", + } diff --git 
a/tests/components/devialet/test_init.py b/tests/components/devialet/test_init.py new file mode 100644 index 00000000000..86d383e91d8 --- /dev/null +++ b/tests/components/devialet/test_init.py @@ -0,0 +1,49 @@ +"""Test the Devialet init.""" +from homeassistant.components.devialet.const import DOMAIN +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN, MediaPlayerState +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import NAME, setup_integration + +from tests.test_util.aiohttp import AiohttpClientMocker + + +async def test_load_unload_config_entry( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet configuration entry loading and unloading.""" + entry = await setup_integration(hass, aioclient_mock) + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id is not None + + state = hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}") + assert state.state == MediaPlayerState.PLAYING + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_load_unload_config_entry_when_device_unavailable( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet configuration entry loading and unloading when the device is unavailable.""" + entry = await setup_integration(hass, aioclient_mock, state="unavailable") + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id is not None + + state = hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}") + assert state.state == "unavailable" + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in hass.data[DOMAIN] + assert entry.state is 
ConfigEntryState.NOT_LOADED diff --git a/tests/components/devialet/test_media_player.py b/tests/components/devialet/test_media_player.py new file mode 100644 index 00000000000..56381bf6de4 --- /dev/null +++ b/tests/components/devialet/test_media_player.py @@ -0,0 +1,312 @@ +"""Test the Devialet init.""" +from unittest.mock import PropertyMock, patch + +from devialet import DevialetApi +from devialet.const import UrlSuffix +from yarl import URL + +from homeassistant.components.devialet.const import DOMAIN +from homeassistant.components.devialet.media_player import SUPPORT_DEVIALET +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_INPUT_SOURCE_LIST, + ATTR_MEDIA_ALBUM_NAME, + ATTR_MEDIA_ARTIST, + ATTR_MEDIA_DURATION, + ATTR_MEDIA_POSITION, + ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_TITLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + ATTR_SOUND_MODE, + ATTR_SOUND_MODE_LIST, + DOMAIN as MP_DOMAIN, + SERVICE_SELECT_SOUND_MODE, + SERVICE_SELECT_SOURCE, + MediaPlayerState, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, + ATTR_SUPPORTED_FEATURES, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_MEDIA_STOP, + SERVICE_TURN_OFF, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . 
import HOST, NAME, setup_integration + +from tests.test_util.aiohttp import AiohttpClientMocker + +SERVICE_TO_URL = { + SERVICE_MEDIA_SEEK: [UrlSuffix.SEEK], + SERVICE_MEDIA_PLAY: [UrlSuffix.PLAY], + SERVICE_MEDIA_PAUSE: [UrlSuffix.PAUSE], + SERVICE_MEDIA_STOP: [UrlSuffix.PAUSE], + SERVICE_MEDIA_PREVIOUS_TRACK: [UrlSuffix.PREVIOUS_TRACK], + SERVICE_MEDIA_NEXT_TRACK: [UrlSuffix.NEXT_TRACK], + SERVICE_TURN_OFF: [UrlSuffix.TURN_OFF], + SERVICE_VOLUME_UP: [UrlSuffix.VOLUME_UP], + SERVICE_VOLUME_DOWN: [UrlSuffix.VOLUME_DOWN], + SERVICE_VOLUME_SET: [UrlSuffix.VOLUME_SET], + SERVICE_VOLUME_MUTE: [UrlSuffix.MUTE, UrlSuffix.UNMUTE], + SERVICE_SELECT_SOUND_MODE: [UrlSuffix.EQUALIZER, UrlSuffix.NIGHT_MODE], + SERVICE_SELECT_SOURCE: [ + str(UrlSuffix.SELECT_SOURCE).replace( + "%SOURCE_ID%", "82834351-8255-4e2e-9ce2-b7d4da0aa3b0" + ), + str(UrlSuffix.SELECT_SOURCE).replace( + "%SOURCE_ID%", "07b1bf6d-9216-4a7b-8d53-5590cee21d90" + ), + ], +} + +SERVICE_TO_DATA = { + SERVICE_MEDIA_SEEK: [{"seek_position": 321}], + SERVICE_MEDIA_PLAY: [{}], + SERVICE_MEDIA_PAUSE: [{}], + SERVICE_MEDIA_STOP: [{}], + SERVICE_MEDIA_PREVIOUS_TRACK: [{}], + SERVICE_MEDIA_NEXT_TRACK: [{}], + SERVICE_TURN_OFF: [{}], + SERVICE_VOLUME_UP: [{}], + SERVICE_VOLUME_DOWN: [{}], + SERVICE_VOLUME_SET: [{ATTR_MEDIA_VOLUME_LEVEL: 0.5}], + SERVICE_VOLUME_MUTE: [ + {ATTR_MEDIA_VOLUME_MUTED: True}, + {ATTR_MEDIA_VOLUME_MUTED: False}, + ], + SERVICE_SELECT_SOUND_MODE: [ + {ATTR_SOUND_MODE: "Night mode"}, + {ATTR_SOUND_MODE: "Flat"}, + ], + SERVICE_SELECT_SOURCE: [ + {ATTR_INPUT_SOURCE: "Optical left"}, + {ATTR_INPUT_SOURCE: "Online"}, + ], +} + + +async def test_media_player_playing( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet configuration entry loading and unloading.""" + await async_setup_component(hass, "homeassistant", {}) + entry = await setup_integration(hass, aioclient_mock) + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is 
ConfigEntryState.LOADED + + await hass.services.async_call( + "homeassistant", + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: [f"{MP_DOMAIN}.{NAME.lower()}"]}, + blocking=True, + ) + + state = hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}") + assert state.state == MediaPlayerState.PLAYING + assert state.name == NAME + assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.2 + assert state.attributes[ATTR_MEDIA_VOLUME_MUTED] is False + assert state.attributes[ATTR_INPUT_SOURCE_LIST] is not None + assert state.attributes[ATTR_SOUND_MODE_LIST] is not None + assert state.attributes[ATTR_MEDIA_ARTIST] == "The Beatles" + assert state.attributes[ATTR_MEDIA_ALBUM_NAME] == "1 (Remastered)" + assert state.attributes[ATTR_MEDIA_TITLE] == "Hey Jude - Remastered 2015" + assert state.attributes[ATTR_ENTITY_PICTURE] is not None + assert state.attributes[ATTR_MEDIA_DURATION] == 425653 + assert state.attributes[ATTR_MEDIA_POSITION] == 123102 + assert state.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] is not None + assert state.attributes[ATTR_SUPPORTED_FEATURES] is not None + assert state.attributes[ATTR_INPUT_SOURCE] is not None + assert state.attributes[ATTR_SOUND_MODE] is not None + + with patch( + "homeassistant.components.devialet.DevialetApi.playing_state", + new_callable=PropertyMock, + ) as mock: + mock.return_value = MediaPlayerState.PAUSED + + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").state + == MediaPlayerState.PAUSED + ) + + with patch( + "homeassistant.components.devialet.DevialetApi.playing_state", + new_callable=PropertyMock, + ) as mock: + mock.return_value = MediaPlayerState.ON + + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").state == MediaPlayerState.ON + ) + + with patch.object(DevialetApi, "equalizer", new_callable=PropertyMock) as mock: + 
mock.return_value = None + + with patch.object(DevialetApi, "night_mode", new_callable=PropertyMock) as mock: + mock.return_value = True + + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").attributes[ + ATTR_SOUND_MODE + ] + == "Night mode" + ) + + with patch.object(DevialetApi, "equalizer", new_callable=PropertyMock) as mock: + mock.return_value = "unexpected_value" + + with patch.object(DevialetApi, "night_mode", new_callable=PropertyMock) as mock: + mock.return_value = False + + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + ATTR_SOUND_MODE + not in hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").attributes + ) + + with patch.object(DevialetApi, "equalizer", new_callable=PropertyMock) as mock: + mock.return_value = None + + with patch.object(DevialetApi, "night_mode", new_callable=PropertyMock) as mock: + mock.return_value = None + + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + ATTR_SOUND_MODE + not in hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").attributes + ) + + with patch.object( + DevialetApi, "available_options", new_callable=PropertyMock + ) as mock: + mock.return_value = None + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").attributes[ + ATTR_SUPPORTED_FEATURES + ] + == SUPPORT_DEVIALET + ) + + with patch.object(DevialetApi, "source", new_callable=PropertyMock) as mock: + mock.return_value = "someSource" + await hass.config_entries.async_reload(entry.entry_id) + await hass.async_block_till_done() + assert ( + ATTR_INPUT_SOURCE + not in hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}").attributes + ) + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in 
hass.data[DOMAIN] + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_media_player_offline( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet configuration entry loading and unloading.""" + entry = await setup_integration(hass, aioclient_mock, state=STATE_UNAVAILABLE) + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.LOADED + + state = hass.states.get(f"{MP_DOMAIN}.{NAME.lower()}") + assert state.state == STATE_UNAVAILABLE + assert state.name == NAME + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_media_player_without_serial( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet configuration entry loading and unloading.""" + entry = await setup_integration(hass, aioclient_mock, serial=None) + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id is None + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_media_player_services( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test the Devialet services.""" + entry = await setup_integration( + hass, aioclient_mock, state=MediaPlayerState.PLAYING + ) + + assert entry.entry_id in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.LOADED + + target = {ATTR_ENTITY_ID: hass.states.get(f"{MP_DOMAIN}.{NAME}").entity_id} + + for i, (service, urls) in enumerate(SERVICE_TO_URL.items()): + for url in urls: + aioclient_mock.post(f"http://{HOST}{url}") + + for data_set in list(SERVICE_TO_DATA.values())[i]: + service_data = target.copy() + 
service_data.update(data_set) + + await hass.services.async_call( + MP_DOMAIN, + service, + service_data=service_data, + blocking=True, + ) + await hass.async_block_till_done() + + for url in urls: + call_available = False + for item in aioclient_mock.mock_calls: + if item[0] == "POST" and item[1] == URL(f"http://{HOST}{url}"): + call_available = True + break + + assert call_available + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.entry_id not in hass.data[DOMAIN] + assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/dhcp/test_init.py b/tests/components/dhcp/test_init.py index 47933c30537..5013568ad39 100644 --- a/tests/components/dhcp/test_init.py +++ b/tests/components/dhcp/test_init.py @@ -151,8 +151,11 @@ async def _async_get_handle_dhcp_packet(hass, integration_matchers): with patch( "homeassistant.components.dhcp._verify_l2socket_setup", ), patch( - "scapy.arch.common.compile_filter" - ), patch("scapy.sendrecv.AsyncSniffer", _mock_sniffer): + "scapy.arch.common.compile_filter", + ), patch( + "scapy.sendrecv.AsyncSniffer", + _mock_sniffer, + ): await dhcp_watcher.async_start() return async_handle_dhcp_packet diff --git a/tests/components/discovergy/conftest.py b/tests/components/discovergy/conftest.py index ea0fe84852f..819a1cbb72a 100644 --- a/tests/components/discovergy/conftest.py +++ b/tests/components/discovergy/conftest.py @@ -1,33 +1,61 @@ """Fixtures for Discovergy integration tests.""" -from unittest.mock import AsyncMock, Mock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, patch +from pydiscovergy.models import Reading import pytest from homeassistant.components.discovergy import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -from tests.components.discovergy.const 
import GET_METERS +from tests.components.discovergy.const import GET_METERS, LAST_READING, LAST_READING_GAS -@pytest.fixture -def mock_meters() -> Mock: - """Patch libraries.""" - with patch("pydiscovergy.Discovergy.meters") as discovergy: - discovergy.side_effect = AsyncMock(return_value=GET_METERS) - yield discovergy +def _meter_last_reading(meter_id: str) -> Reading: + """Side effect function for Discovergy mock.""" + return ( + LAST_READING_GAS + if meter_id == "d81a652fe0824f9a9d336016587d3b9d" + else LAST_READING + ) -@pytest.fixture +@pytest.fixture(name="discovergy") +def mock_discovergy() -> Generator[AsyncMock, None, None]: + """Mock the pydiscovergy client.""" + with patch( + "homeassistant.components.discovergy.Discovergy", + autospec=True, + ) as mock_discovergy, patch( + "homeassistant.components.discovergy.config_flow.Discovergy", + new=mock_discovergy, + ): + mock = mock_discovergy.return_value + mock.meters.return_value = GET_METERS + mock.meter_last_reading.side_effect = _meter_last_reading + yield mock + + +@pytest.fixture(name="config_entry") async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return a MockConfigEntry for testing.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, title="user@example.org", unique_id="user@example.org", data={CONF_EMAIL: "user@example.org", CONF_PASSWORD: "supersecretpassword"}, ) - entry.add_to_hass(hass) - return entry + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, discovergy: AsyncMock +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() diff --git a/tests/components/discovergy/const.py b/tests/components/discovergy/const.py index 5c233d50ba8..6c5428741af 100644 --- a/tests/components/discovergy/const.py +++ b/tests/components/discovergy/const.py @@ -30,6 
+30,32 @@ GET_METERS = [ "last_measurement_time": 1678430543742, }, ), + Meter( + meter_id="d81a652fe0824f9a9d336016587d3b9d", + serial_number="def456", + full_serial_number="def456", + type="PIP", + measurement_type="GAS", + load_profile_type="SLP", + location=Location( + zip=12345, + city="Testhause", + street="Teststraße", + street_number="1", + country="Germany", + ), + additional={ + "manufacturer_id": "TST", + "printed_full_serial_number": "def456", + "administration_number": "12345", + "scaling_factor": 1, + "current_scaling_factor": 1, + "voltage_scaling_factor": 1, + "internal_meters": 1, + "first_measurement_time": 1517569090926, + "last_measurement_time": 1678430543742, + }, + ), ] LAST_READING = Reading( @@ -50,3 +76,8 @@ LAST_READING = Reading( "voltage3": 239000.0, }, ) + +LAST_READING_GAS = Reading( + time=datetime.datetime(2023, 3, 10, 7, 32, 6, 702000), + values={"actualityDuration": 52000.0, "storageNumber": 0.0, "volume": 21064800.0}, +) diff --git a/tests/components/discovergy/snapshots/test_diagnostics.ambr b/tests/components/discovergy/snapshots/test_diagnostics.ambr index d02f57c7540..2a7dd6903af 100644 --- a/tests/components/discovergy/snapshots/test_diagnostics.ambr +++ b/tests/components/discovergy/snapshots/test_diagnostics.ambr @@ -22,8 +22,36 @@ 'serial_number': '**REDACTED**', 'type': 'TST', }), + dict({ + 'additional': dict({ + 'administration_number': '**REDACTED**', + 'current_scaling_factor': 1, + 'first_measurement_time': 1517569090926, + 'internal_meters': 1, + 'last_measurement_time': 1678430543742, + 'manufacturer_id': 'TST', + 'printed_full_serial_number': '**REDACTED**', + 'scaling_factor': 1, + 'voltage_scaling_factor': 1, + }), + 'full_serial_number': '**REDACTED**', + 'load_profile_type': 'SLP', + 'location': '**REDACTED**', + 'measurement_type': 'GAS', + 'meter_id': 'd81a652fe0824f9a9d336016587d3b9d', + 'serial_number': '**REDACTED**', + 'type': 'PIP', + }), ]), 'readings': dict({ + 'd81a652fe0824f9a9d336016587d3b9d': 
dict({ + 'time': '2023-03-10T07:32:06.702000', + 'values': dict({ + 'actualityDuration': 52000.0, + 'storageNumber': 0.0, + 'volume': 21064800.0, + }), + }), 'f8d610b7a8cc4e73939fa33b990ded54': dict({ 'time': '2023-03-10T07:32:06.702000', 'values': dict({ diff --git a/tests/components/discovergy/snapshots/test_sensor.ambr b/tests/components/discovergy/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..981d1119a93 --- /dev/null +++ b/tests/components/discovergy/snapshots/test_sensor.ambr @@ -0,0 +1,222 @@ +# serializer version: 1 +# name: test_sensor[electricity last transmitted] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.electricity_teststrasse_1_last_transmitted', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last transmitted', + 'platform': 'discovergy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_transmitted', + 'unique_id': 'abc123-last_transmitted', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[electricity last transmitted].1 + None +# --- +# name: test_sensor[electricity total consumption] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.electricity_teststrasse_1_total_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 4, + }), + }), + 'original_device_class': , + 'original_icon': None, 
+ 'original_name': 'Total consumption', + 'platform': 'discovergy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_consumption', + 'unique_id': 'abc123-energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[electricity total consumption].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Electricity Teststraße 1 Total consumption', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.electricity_teststrasse_1_total_consumption', + 'last_changed': , + 'last_updated': , + 'state': '11934.8699715', + }) +# --- +# name: test_sensor[electricity total power] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.electricity_teststrasse_1_total_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total power', + 'platform': 'discovergy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_power', + 'unique_id': 'abc123-power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[electricity total power].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Electricity Teststraße 1 Total power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.electricity_teststrasse_1_total_power', + 'last_changed': , + 'last_updated': , + 'state': '531.75', + }) +# --- +# name: test_sensor[gas last transmitted] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gas_teststrasse_1_last_transmitted', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last transmitted', + 'platform': 'discovergy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_transmitted', + 'unique_id': 'def456-last_transmitted', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[gas last transmitted].1 + None +# --- +# name: test_sensor[gas total consumption] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gas_teststrasse_1_total_gas_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 4, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total gas consumption', + 'platform': 'discovergy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_gas_consumption', + 'unique_id': 'def456-volume', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[gas total consumption].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'gas', + 'friendly_name': 'Gas Teststraße 1 Total gas consumption', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gas_teststrasse_1_total_gas_consumption', + 'last_changed': , + 'last_updated': , + 'state': '21064.8', + }) +# --- diff --git a/tests/components/discovergy/test_config_flow.py 
b/tests/components/discovergy/test_config_flow.py index 08e9df06978..7c257f814c4 100644 --- a/tests/components/discovergy/test_config_flow.py +++ b/tests/components/discovergy/test_config_flow.py @@ -1,5 +1,5 @@ """Test the Discovergy config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, patch from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin import pytest @@ -11,10 +11,9 @@ from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -from tests.components.discovergy.const import GET_METERS -async def test_form(hass: HomeAssistant, mock_meters: Mock) -> None: +async def test_form(hass: HomeAssistant, discovergy: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -45,12 +44,14 @@ async def test_form(hass: HomeAssistant, mock_meters: Mock) -> None: async def test_reauth( - hass: HomeAssistant, mock_meters: Mock, mock_config_entry: MockConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry, discovergy: AsyncMock ) -> None: """Test reauth flow.""" + config_entry.add_to_hass(hass) + init_result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": mock_config_entry.unique_id}, + context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, data=None, ) @@ -84,35 +85,34 @@ async def test_reauth( (Exception, "unknown"), ], ) -async def test_form_fail(hass: HomeAssistant, error: Exception, message: str) -> None: +async def test_form_fail( + hass: HomeAssistant, discovergy: AsyncMock, error: Exception, message: str +) -> None: """Test to handle exceptions.""" + discovergy.meters.side_effect = error + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: 
"test-password", + }, + ) - with patch( - "pydiscovergy.Discovergy.meters", - side_effect=error, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - CONF_EMAIL: "test@example.com", - CONF_PASSWORD: "test-password", - }, - ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": message} - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": message} + # reset and test for success + discovergy.meters.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "test-password", + }, + ) - with patch("pydiscovergy.Discovergy.meters", return_value=GET_METERS): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_EMAIL: "test@example.com", - CONF_PASSWORD: "test-password", - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "test@example.com" - assert "errors" not in result + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert "errors" not in result diff --git a/tests/components/discovergy/test_diagnostics.py b/tests/components/discovergy/test_diagnostics.py index d7565e3f0c4..f2db5fb854d 100644 --- a/tests/components/discovergy/test_diagnostics.py +++ b/tests/components/discovergy/test_diagnostics.py @@ -1,31 +1,22 @@ """Test Discovergy diagnostics.""" -from unittest.mock import patch - +import pytest from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.components.discovergy.const import GET_METERS, LAST_READING from tests.typing 
import ClientSessionGenerator +@pytest.mark.usefixtures("setup_integration") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, + config_entry: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - with patch("pydiscovergy.Discovergy.meters", return_value=GET_METERS), patch( - "pydiscovergy.Discovergy.meter_last_reading", return_value=LAST_READING - ): - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - result = await get_diagnostics_for_config_entry( - hass, hass_client, mock_config_entry - ) + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) assert result == snapshot diff --git a/tests/components/discovergy/test_init.py b/tests/components/discovergy/test_init.py new file mode 100644 index 00000000000..ac8f79540f5 --- /dev/null +++ b/tests/components/discovergy/test_init.py @@ -0,0 +1,62 @@ +"""Test Discovergy component setup.""" +from unittest.mock import AsyncMock + +from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("discovergy") +async def test_config_setup( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test for setup success.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("error", "expected_state"), + [ + (InvalidLogin, ConfigEntryState.SETUP_ERROR), + (HTTPError, ConfigEntryState.SETUP_RETRY), + (DiscovergyClientError, ConfigEntryState.SETUP_RETRY), + (Exception, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_config_not_ready( + hass: HomeAssistant, + 
config_entry: MockConfigEntry, + discovergy: AsyncMock, + error: Exception, + expected_state: ConfigEntryState, +) -> None: + """Test for setup failure.""" + config_entry.add_to_hass(hass) + + discovergy.meters.side_effect = error + + await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.state is expected_state + + +@pytest.mark.usefixtures("setup_integration") +async def test_reload_config_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test config entry reload.""" + new_data = {"email": "abc@example.com", "password": "password"} + + assert config_entry.state is ConfigEntryState.LOADED + + assert hass.config_entries.async_update_entry(config_entry, data=new_data) + + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.data == new_data diff --git a/tests/components/discovergy/test_sensor.py b/tests/components/discovergy/test_sensor.py new file mode 100644 index 00000000000..aba8229acf5 --- /dev/null +++ b/tests/components/discovergy/test_sensor.py @@ -0,0 +1,75 @@ +"""Tests Discovergy sensor component.""" +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + + +@pytest.mark.parametrize( + "state_name", + [ + "sensor.electricity_teststrasse_1_total_consumption", + "sensor.electricity_teststrasse_1_total_power", + "sensor.electricity_teststrasse_1_last_transmitted", + "sensor.gas_teststrasse_1_total_gas_consumption", + "sensor.gas_teststrasse_1_last_transmitted", + ], + ids=[ + "electricity total consumption", + "electricity total power", + "electricity last transmitted", + "gas total consumption", + "gas last transmitted", + ], +) +@pytest.mark.usefixtures("setup_integration") +async 
def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + state_name: str, + snapshot: SnapshotAssertion, +) -> None: + """Test sensor setup and update.""" + + entry = entity_registry.async_get(state_name) + assert entry == snapshot + + state = hass.states.get(state_name) + assert state == snapshot + + +@pytest.mark.parametrize( + "error", + [ + InvalidLogin, + HTTPError, + DiscovergyClientError, + Exception, + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_sensor_update_fail( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + discovergy: AsyncMock, + error: Exception, +) -> None: + """Test sensor errors.""" + state = hass.states.get("sensor.electricity_teststrasse_1_total_consumption") + assert state + assert state.state == "11934.8699715" + + discovergy.meter_last_reading.side_effect = error + + freezer.tick(timedelta(minutes=1)) + await hass.async_block_till_done() + + state = hass.states.get("sensor.electricity_teststrasse_1_total_consumption") + assert state + assert state.state == "unavailable" diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py new file mode 100644 index 00000000000..493fd93259f --- /dev/null +++ b/tests/components/dsmr/test_mbus_migration.py @@ -0,0 +1,212 @@ +"""Tests for the DSMR integration.""" +import datetime +from decimal import Decimal + +from homeassistant.components.dsmr.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_migrate_gas_to_mbus( + hass: HomeAssistant, entity_registry: er.EntityRegistry, dsmr_connection_fixture +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + from dsmr_parser.obis_references import ( + BELGIUM_MBUS1_DEVICE_TYPE, + 
BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS1_METER_READING2, + ) + from dsmr_parser.objects import CosemObject, MBusObject + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5B", + "precision": 4, + "reconnect_interval": 30, + "serial_id": "1234", + "serial_id_gas": "37464C4F32313139303333373331", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading" + + device_registry = hass.helpers.device_registry.async_get(hass) + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = { + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + BELGIUM_MBUS1_METER_READING2: MBusObject( + BELGIUM_MBUS1_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m3"}, + ], + ), + } + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await 
hass.async_block_till_done() + + dev_entities = er.async_entries_for_device( + entity_registry, device.id, include_disabled_entities=True + ) + assert not dev_entities + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331" + ) + == "sensor.gas_meter_reading" + ) + + +async def test_migrate_gas_to_mbus_exists( + hass: HomeAssistant, entity_registry: er.EntityRegistry, dsmr_connection_fixture +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + from dsmr_parser.obis_references import ( + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS1_METER_READING2, + ) + from dsmr_parser.objects import CosemObject, MBusObject + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5B", + "precision": 4, + "reconnect_interval": 30, + "serial_id": "1234", + "serial_id_gas": "37464C4F32313139303333373331", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading" + + device_registry = hass.helpers.device_registry.async_get(hass) + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + + device2 = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, 
"37464C4F32313139303333373331")}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading_alt", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device2.id, + unique_id="37464C4F32313139303333373331", + config_entry=mock_entry, + ) + await hass.async_block_till_done() + + telegram = { + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + BELGIUM_MBUS1_METER_READING2: MBusObject( + BELGIUM_MBUS1_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m3"}, + ], + ), + } + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + == "sensor.gas_meter_reading" + ) diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index 1895dd15dd1..0c71525be48 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -8,19 +8,8 @@ import asyncio import datetime from decimal import Decimal from itertools import chain, repeat -from typing import Literal from unittest.mock import DEFAULT, MagicMock -from dsmr_parser.obis_references import ( - BELGIUM_MBUS1_METER_READING1, - BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS2_METER_READING2, - BELGIUM_MBUS3_METER_READING1, - 
BELGIUM_MBUS3_METER_READING2, - BELGIUM_MBUS4_METER_READING1, - BELGIUM_MBUS4_METER_READING2, -) import pytest from homeassistant import config_entries @@ -35,6 +24,7 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, + STATE_UNKNOWN, UnitOfEnergy, UnitOfPower, UnitOfVolume, @@ -145,8 +135,8 @@ async def test_default_setup( # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser telegram_callback(telegram) - # after receiving telegram entities need to have the chance to update - await asyncio.sleep(0) + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() # ensure entities have new state value after incoming telegram power_consumption = hass.states.get("sensor.electricity_meter_power_consumption") @@ -321,7 +311,17 @@ async def test_v4_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: ) -async def test_v5_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: +@pytest.mark.parametrize( + ("value", "state"), + [ + (Decimal(745.690), "745.69"), + (Decimal(745.695), "745.695"), + (Decimal(0.000), STATE_UNKNOWN), + ], +) +async def test_v5_meter( + hass: HomeAssistant, dsmr_connection_fixture, value: Decimal, state: str +) -> None: """Test if v5 meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture @@ -348,7 +348,7 @@ async def test_v5_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: HOURLY_GAS_METER_READING, [ {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "m3"}, + {"value": value, "unit": "m3"}, ], ), ELECTRICITY_ACTIVE_TARIFF: CosemObject( @@ -384,7 +384,7 @@ async def test_v5_meter(hass: HomeAssistant, dsmr_connection_fixture) -> None: # check if gas consumption is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") - assert gas_consumption.state == "745.695" + assert gas_consumption.state == 
state assert gas_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.GAS assert ( gas_consumption.attributes.get(ATTR_STATE_CLASS) @@ -495,10 +495,18 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No from dsmr_parser.obis_references import ( BELGIUM_CURRENT_AVERAGE_DEMAND, BELGIUM_MAXIMUM_DEMAND_MONTH, + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_METER_READING2, + BELGIUM_MBUS2_DEVICE_TYPE, + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS2_METER_READING1, + BELGIUM_MBUS3_DEVICE_TYPE, + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, BELGIUM_MBUS3_METER_READING2, - BELGIUM_MBUS4_METER_READING2, + BELGIUM_MBUS4_DEVICE_TYPE, + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS4_METER_READING1, ELECTRICITY_ACTIVE_TARIFF, ) from dsmr_parser.objects import CosemObject, MBusObject @@ -509,41 +517,13 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No "precision": 4, "reconnect_interval": 30, "serial_id": "1234", - "serial_id_gas": "5678", + "serial_id_gas": None, } entry_options = { "time_between_update": 0, } telegram = { - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, - [ - {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "m3"}, - ], - ), - BELGIUM_MBUS2_METER_READING2: MBusObject( - BELGIUM_MBUS2_METER_READING2, - [ - {"value": datetime.datetime.fromtimestamp(1551642214)}, - {"value": Decimal(745.696), "unit": "m3"}, - ], - ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, - [ - {"value": datetime.datetime.fromtimestamp(1551642215)}, - {"value": Decimal(745.697), "unit": "m3"}, - ], - ), - BELGIUM_MBUS4_METER_READING2: MBusObject( - BELGIUM_MBUS4_METER_READING2, - [ - {"value": datetime.datetime.fromtimestamp(1551642216)}, - {"value": Decimal(745.698), "unit": "m3"}, - ], - ), BELGIUM_CURRENT_AVERAGE_DEMAND: CosemObject( 
BELGIUM_CURRENT_AVERAGE_DEMAND, [{"value": Decimal(1.75), "unit": "kW"}], @@ -555,6 +535,62 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No {"value": Decimal(4.11), "unit": "kW"}, ], ), + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + BELGIUM_MBUS1_METER_READING2: MBusObject( + BELGIUM_MBUS1_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m3"}, + ], + ), + BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373332", "unit": ""}], + ), + BELGIUM_MBUS2_METER_READING1: MBusObject( + BELGIUM_MBUS2_METER_READING1, + [ + {"value": datetime.datetime.fromtimestamp(1551642214)}, + {"value": Decimal(678.695), "unit": "m3"}, + ], + ), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373333", "unit": ""}], + ), + BELGIUM_MBUS3_METER_READING2: MBusObject( + BELGIUM_MBUS3_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642215)}, + {"value": Decimal(12.12), "unit": "m3"}, + ], + ), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373334", "unit": ""}], + ), + BELGIUM_MBUS4_METER_READING1: MBusObject( + BELGIUM_MBUS4_METER_READING1, + [ + {"value": datetime.datetime.fromtimestamp(1551642216)}, + {"value": 
Decimal(13.13), "unit": "m3"}, + ], + ), ELECTRICITY_ACTIVE_TARIFF: CosemObject( ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] ), @@ -600,7 +636,7 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No assert max_demand.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.KILO_WATT assert max_demand.attributes.get(ATTR_STATE_CLASS) is None - # check if gas consumption is parsed correctly + # check if gas consumption mbus1 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") assert gas_consumption.state == "745.695" assert gas_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.GAS @@ -613,48 +649,69 @@ async def test_belgian_meter(hass: HomeAssistant, dsmr_connection_fixture) -> No == UnitOfVolume.CUBIC_METERS ) + # check if water usage mbus2 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption.state == "678.695" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) -@pytest.mark.parametrize( - ("key1", "key2", "key3", "gas_value"), - [ - ( - BELGIUM_MBUS1_METER_READING1, - BELGIUM_MBUS2_METER_READING2, - BELGIUM_MBUS3_METER_READING1, - "745.696", - ), - ( - BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS3_METER_READING2, - "745.695", - ), - ( - BELGIUM_MBUS4_METER_READING2, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS3_METER_READING1, - "745.695", - ), - ( - BELGIUM_MBUS4_METER_READING1, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS3_METER_READING2, - "745.697", - ), - ], -) -async def test_belgian_meter_alt( - hass: HomeAssistant, - dsmr_connection_fixture, - key1: Literal, - key2: Literal, - key3: Literal, - 
gas_value: str, -) -> None: + # check if gas consumption mbus3 is parsed correctly + gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") + assert gas_consumption.state == "12.12" + assert gas_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.GAS + assert ( + gas_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + gas_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if water usage mbus4 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") + assert water_consumption.state == "13.13" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + +async def test_belgian_meter_alt(hass: HomeAssistant, dsmr_connection_fixture) -> None: """Test if Belgian meter is correctly parsed.""" (connection_factory, transport, protocol) = dsmr_connection_fixture - from dsmr_parser.objects import MBusObject + from dsmr_parser.obis_references import ( + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS1_METER_READING1, + BELGIUM_MBUS2_DEVICE_TYPE, + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS2_METER_READING2, + BELGIUM_MBUS3_DEVICE_TYPE, + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS3_METER_READING1, + BELGIUM_MBUS4_DEVICE_TYPE, + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS4_METER_READING2, + ) + from dsmr_parser.objects import CosemObject, MBusObject entry_data = { "port": "/dev/ttyUSB0", @@ -662,32 +719,67 @@ async def test_belgian_meter_alt( "precision": 4, "reconnect_interval": 30, "serial_id": "1234", - "serial_id_gas": "5678", + "serial_id_gas": None, } entry_options = { "time_between_update": 0, } 
telegram = { - key1: MBusObject( - key1, - [ - {"value": datetime.datetime.fromtimestamp(1551642213)}, - {"value": Decimal(745.695), "unit": "m3"}, - ], + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "007", "unit": ""}] ), - key2: MBusObject( - key2, - [ - {"value": datetime.datetime.fromtimestamp(1551642214)}, - {"value": Decimal(745.696), "unit": "m3"}, - ], + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - key3: MBusObject( - key3, + BELGIUM_MBUS1_METER_READING1: MBusObject( + BELGIUM_MBUS1_METER_READING1, [ {"value": datetime.datetime.fromtimestamp(1551642215)}, - {"value": Decimal(745.697), "unit": "m3"}, + {"value": Decimal(123.456), "unit": "m3"}, + ], + ), + BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373332", "unit": ""}], + ), + BELGIUM_MBUS2_METER_READING2: MBusObject( + BELGIUM_MBUS2_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642216)}, + {"value": Decimal(678.901), "unit": "m3"}, + ], + ), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373333", "unit": ""}], + ), + BELGIUM_MBUS3_METER_READING1: MBusObject( + BELGIUM_MBUS3_METER_READING1, + [ + {"value": datetime.datetime.fromtimestamp(1551642217)}, + {"value": Decimal(12.12), "unit": "m3"}, + ], + ), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373334", "unit": ""}], + ), + 
BELGIUM_MBUS4_METER_READING2: MBusObject( + BELGIUM_MBUS4_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642218)}, + {"value": Decimal(13.13), "unit": "m3"}, ], ), } @@ -709,9 +801,24 @@ async def test_belgian_meter_alt( # after receiving telegram entities need to have the chance to be created await hass.async_block_till_done() - # check if gas consumption is parsed correctly + # check if water usage mbus1 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption.state == "123.456" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if gas consumption mbus2 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") - assert gas_consumption.state == gas_value + assert gas_consumption.state == "678.901" assert gas_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.GAS assert ( gas_consumption.attributes.get(ATTR_STATE_CLASS) @@ -722,6 +829,157 @@ async def test_belgian_meter_alt( == UnitOfVolume.CUBIC_METERS ) + # check if water usage mbus3 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") + assert water_consumption.state == "12.12" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if gas consumption mbus4 is parsed correctly + gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") + assert gas_consumption.state == "13.13" + assert 
gas_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.GAS + assert ( + gas_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + gas_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + +async def test_belgian_meter_mbus(hass: HomeAssistant, dsmr_connection_fixture) -> None: + """Test if Belgian meter is correctly parsed.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + from dsmr_parser.obis_references import ( + BELGIUM_MBUS1_DEVICE_TYPE, + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS2_DEVICE_TYPE, + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS3_DEVICE_TYPE, + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + BELGIUM_MBUS3_METER_READING2, + BELGIUM_MBUS4_DEVICE_TYPE, + BELGIUM_MBUS4_METER_READING1, + ELECTRICITY_ACTIVE_TARIFF, + ) + from dsmr_parser.objects import CosemObject, MBusObject + + entry_data = { + "port": "/dev/ttyUSB0", + "dsmr_version": "5B", + "precision": 4, + "reconnect_interval": 30, + "serial_id": "1234", + "serial_id_gas": None, + } + entry_options = { + "time_between_update": 0, + } + + telegram = { + ELECTRICITY_ACTIVE_TARIFF: CosemObject( + ELECTRICITY_ACTIVE_TARIFF, [{"value": "0003", "unit": ""}] + ), + BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "006", "unit": ""}] + ), + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] + ), + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + [{"value": "37464C4F32313139303333373332", "unit": ""}], + ), + BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( + BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + 
[{"value": "37464C4F32313139303333373333", "unit": ""}], + ), + BELGIUM_MBUS3_METER_READING2: MBusObject( + BELGIUM_MBUS3_METER_READING2, + [ + {"value": datetime.datetime.fromtimestamp(1551642217)}, + {"value": Decimal(12.12), "unit": "m3"}, + ], + ), + BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( + BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] + ), + BELGIUM_MBUS4_METER_READING1: MBusObject( + BELGIUM_MBUS4_METER_READING1, + [ + {"value": datetime.datetime.fromtimestamp(1551642218)}, + {"value": Decimal(13.13), "unit": "m3"}, + ], + ), + } + + mock_entry = MockConfigEntry( + domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options + ) + + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + # tariff should be translated in human readable and have no unit + active_tariff = hass.states.get("sensor.electricity_meter_active_tariff") + assert active_tariff.state == "unknown" + + # check if gas consumption mbus2 is parsed correctly + gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") + assert gas_consumption is None + + # check if water usage mbus3 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") + assert water_consumption is None + + # check if gas consumption mbus4 is parsed correctly + gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") + assert gas_consumption is None + + # check if gas consumption mbus4 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption.state == "13.13" + assert ( + 
water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + async def test_belgian_meter_low(hass: HomeAssistant, dsmr_connection_fixture) -> None: """Test if Belgian meter is correctly parsed.""" diff --git a/tests/components/ecobee/test_config_flow.py b/tests/components/ecobee/test_config_flow.py index 7d79a10e912..a0f34e3cd21 100644 --- a/tests/components/ecobee/test_config_flow.py +++ b/tests/components/ecobee/test_config_flow.py @@ -198,9 +198,7 @@ async def test_import_flow_triggered_with_ecobee_conf_and_valid_data_and_stale_t return_value=MOCK_ECOBEE_CONF, ), patch( "homeassistant.components.ecobee.config_flow.Ecobee" - ) as mock_ecobee, patch.object( - flow, "async_step_user" - ) as mock_async_step_user: + ) as mock_ecobee, patch.object(flow, "async_step_user") as mock_async_step_user: mock_ecobee = mock_ecobee.return_value mock_ecobee.refresh_tokens.return_value = False diff --git a/tests/components/electrasmart/test_config_flow.py b/tests/components/electrasmart/test_config_flow.py index f53bea3e96c..929259a0ccf 100644 --- a/tests/components/electrasmart/test_config_flow.py +++ b/tests/components/electrasmart/test_config_flow.py @@ -55,7 +55,8 @@ async def test_one_time_password(hass: HomeAssistant): "electrasmart.api.ElectraAPI.validate_one_time_password", return_value=mock_otp_response, ), patch( - "electrasmart.api.ElectraAPI.fetch_devices", return_value=[] + "electrasmart.api.ElectraAPI.fetch_devices", + return_value=[], ): result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/elkm1/test_config_flow.py b/tests/components/elkm1/test_config_flow.py index 216fc019778..5e33a8aa4c3 100644 --- a/tests/components/elkm1/test_config_flow.py +++ b/tests/components/elkm1/test_config_flow.py 
@@ -229,9 +229,7 @@ async def test_form_user_with_insecure_elk_times_out(hass: HomeAssistant) -> Non 0, ), patch( "homeassistant.components.elkm1.config_flow.LOGIN_TIMEOUT", 0 - ), _patch_discovery(), _patch_elk( - elk=mocked_elk - ): + ), _patch_discovery(), _patch_elk(elk=mocked_elk): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index fb5ff265497..98f99349cac 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -1694,3 +1694,62 @@ async def test_specificly_exposed_entities( result_json = await async_get_lights(client) assert "1" in result_json + + +async def test_get_light_state_when_none(hass_hue: HomeAssistant, hue_client) -> None: + """Test the getting of light state when brightness is None.""" + hass_hue.states.async_set( + "light.ceiling_lights", + STATE_ON, + { + light.ATTR_BRIGHTNESS: None, + light.ATTR_RGB_COLOR: None, + light.ATTR_HS_COLOR: None, + light.ATTR_COLOR_TEMP: None, + light.ATTR_XY_COLOR: None, + light.ATTR_SUPPORTED_COLOR_MODES: [ + light.COLOR_MODE_COLOR_TEMP, + light.COLOR_MODE_HS, + light.COLOR_MODE_XY, + ], + light.ATTR_COLOR_MODE: light.COLOR_MODE_XY, + }, + ) + + light_json = await perform_get_light_state( + hue_client, "light.ceiling_lights", HTTPStatus.OK + ) + state = light_json["state"] + assert state[HUE_API_STATE_ON] is True + assert state[HUE_API_STATE_BRI] == 1 + assert state[HUE_API_STATE_HUE] == 0 + assert state[HUE_API_STATE_SAT] == 0 + assert state[HUE_API_STATE_CT] == 153 + + hass_hue.states.async_set( + "light.ceiling_lights", + STATE_OFF, + { + light.ATTR_BRIGHTNESS: None, + light.ATTR_RGB_COLOR: None, + light.ATTR_HS_COLOR: None, + light.ATTR_COLOR_TEMP: None, + light.ATTR_XY_COLOR: None, + light.ATTR_SUPPORTED_COLOR_MODES: [ + light.COLOR_MODE_COLOR_TEMP, + light.COLOR_MODE_HS, + light.COLOR_MODE_XY, + ], + 
light.ATTR_COLOR_MODE: light.COLOR_MODE_XY, + }, + ) + + light_json = await perform_get_light_state( + hue_client, "light.ceiling_lights", HTTPStatus.OK + ) + state = light_json["state"] + assert state[HUE_API_STATE_ON] is False + assert state[HUE_API_STATE_BRI] == 1 + assert state[HUE_API_STATE_HUE] == 0 + assert state[HUE_API_STATE_SAT] == 0 + assert state[HUE_API_STATE_CT] == 153 diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 41cbb239129..c1fb03545cb 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -89,7 +89,8 @@ async def setup_enphase_envoy_fixture(hass, config, mock_envoy): "homeassistant.components.enphase_envoy.Envoy", return_value=mock_envoy, ), patch( - "homeassistant.components.enphase_envoy.PLATFORMS", [] + "homeassistant.components.enphase_envoy.PLATFORMS", + [], ): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/epson/test_media_player.py b/tests/components/epson/test_media_player.py new file mode 100644 index 00000000000..874a12173d6 --- /dev/null +++ b/tests/components/epson/test_media_player.py @@ -0,0 +1,49 @@ +"""Tests for the epson integration.""" +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.epson.const import DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_set_unique_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +): + """Test the unique id is set on runtime.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Epson", + data={CONF_HOST: "1.1.1.1"}, 
+ entry_id="1cb78c095906279574a0442a1f0003ef", + ) + entry.add_to_hass(hass) + with patch("homeassistant.components.epson.Projector.get_power"): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.unique_id is None + entity_entry = entity_registry.async_get("media_player.epson") + assert entity_entry + assert entity_entry.unique_id == entry.entry_id + with patch( + "homeassistant.components.epson.Projector.get_power", return_value="01" + ), patch( + "homeassistant.components.epson.Projector.get_serial_number", return_value="123" + ), patch( + "homeassistant.components.epson.Projector.get_property", + ): + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + entity_entry = entity_registry.async_get("media_player.epson") + assert entity_entry + assert entity_entry.unique_id == "123" + assert entry.unique_id == "123" diff --git a/tests/components/esphome/bluetooth/test_client.py b/tests/components/esphome/bluetooth/test_client.py new file mode 100644 index 00000000000..7ed1403041d --- /dev/null +++ b/tests/components/esphome/bluetooth/test_client.py @@ -0,0 +1,62 @@ +"""Tests for ESPHomeClient.""" +from __future__ import annotations + +from aioesphomeapi import APIClient, APIVersion, BluetoothProxyFeature, DeviceInfo +from bleak.exc import BleakError +import pytest + +from homeassistant.components.bluetooth import HaBluetoothConnector +from homeassistant.components.esphome.bluetooth.cache import ESPHomeBluetoothCache +from homeassistant.components.esphome.bluetooth.client import ( + ESPHomeClient, + ESPHomeClientData, +) +from homeassistant.components.esphome.bluetooth.device import ESPHomeBluetoothDevice +from homeassistant.components.esphome.bluetooth.scanner import ESPHomeScanner +from homeassistant.core import HomeAssistant + +from tests.components.bluetooth import generate_ble_device + +ESP_MAC_ADDRESS = "AA:BB:CC:DD:EE:FF" +ESP_NAME = "proxy" + + 
+@pytest.fixture(name="client_data") +async def client_data_fixture( + hass: HomeAssistant, mock_client: APIClient +) -> ESPHomeClientData: + """Return a client data fixture.""" + connector = HaBluetoothConnector(ESPHomeClientData, ESP_MAC_ADDRESS, lambda: True) + return ESPHomeClientData( + bluetooth_device=ESPHomeBluetoothDevice(ESP_NAME, ESP_MAC_ADDRESS), + cache=ESPHomeBluetoothCache(), + client=mock_client, + device_info=DeviceInfo( + mac_address=ESP_MAC_ADDRESS, + name=ESP_NAME, + bluetooth_proxy_feature_flags=BluetoothProxyFeature.PASSIVE_SCAN + & BluetoothProxyFeature.ACTIVE_CONNECTIONS + & BluetoothProxyFeature.REMOTE_CACHING + & BluetoothProxyFeature.PAIRING + & BluetoothProxyFeature.CACHE_CLEARING + & BluetoothProxyFeature.RAW_ADVERTISEMENTS, + ), + api_version=APIVersion(1, 9), + title=ESP_NAME, + scanner=ESPHomeScanner( + hass, ESP_MAC_ADDRESS, ESP_NAME, lambda info: None, connector, True + ), + ) + + +async def test_client_usage_while_not_connected(client_data: ESPHomeClientData) -> None: + """Test client usage while not connected.""" + ble_device = generate_ble_device( + "CC:BB:AA:DD:EE:FF", details={"source": ESP_MAC_ADDRESS, "address_type": 1} + ) + + client = ESPHomeClient(ble_device, client_data=client_data) + with pytest.raises( + BleakError, match=f"{ESP_NAME}.*{ESP_MAC_ADDRESS}.*not connected" + ): + await client.write_gatt_char("test", b"test") is False diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 7e00fd22a1c..065890fd623 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -15,8 +15,12 @@ from aioesphomeapi import ( ) from homeassistant.components.climate import ( + ATTR_CURRENT_HUMIDITY, ATTR_FAN_MODE, + ATTR_HUMIDITY, ATTR_HVAC_MODE, + ATTR_MAX_HUMIDITY, + ATTR_MIN_HUMIDITY, ATTR_PRESET_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, @@ -25,6 +29,7 @@ from homeassistant.components.climate import ( DOMAIN as CLIMATE_DOMAIN, FAN_HIGH, 
SERVICE_SET_FAN_MODE, + SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, @@ -312,3 +317,63 @@ async def test_climate_entity_with_step_and_target_temp( [call(key=1, swing_mode=ClimateSwingMode.BOTH)] ) mock_client.climate_command.reset_mock() + + +async def test_climate_entity_with_humidity( + hass: HomeAssistant, mock_client: APIClient, mock_generic_device_entry +) -> None: + """Test a generic climate entity with humidity.""" + entity_info = [ + ClimateInfo( + object_id="myclimate", + key=1, + name="my climate", + unique_id="my_climate", + supports_current_temperature=True, + supports_two_point_target_temperature=True, + supports_action=True, + visual_min_temperature=10.0, + visual_max_temperature=30.0, + supports_current_humidity=True, + supports_target_humidity=True, + visual_min_humidity=10.1, + visual_max_humidity=29.7, + ) + ] + states = [ + ClimateState( + key=1, + mode=ClimateMode.AUTO, + action=ClimateAction.COOLING, + current_temperature=30, + target_temperature=20, + fan_mode=ClimateFanMode.AUTO, + swing_mode=ClimateSwingMode.BOTH, + current_humidity=20.1, + target_humidity=25.7, + ) + ] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("climate.test_myclimate") + assert state is not None + assert state.state == HVACMode.AUTO + attributes = state.attributes + assert attributes[ATTR_CURRENT_HUMIDITY] == 20 + assert attributes[ATTR_HUMIDITY] == 26 + assert attributes[ATTR_MAX_HUMIDITY] == 30 + assert attributes[ATTR_MIN_HUMIDITY] == 10 + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HUMIDITY, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HUMIDITY: 23}, + blocking=True, + ) + mock_client.climate_command.assert_has_calls([call(key=1, target_humidity=23)]) + mock_client.climate_command.reset_mock() diff --git a/tests/components/esphome/test_entity.py 
b/tests/components/esphome/test_entity.py index fdc57b2dc24..9a5cb441f28 100644 --- a/tests/components/esphome/test_entity.py +++ b/tests/components/esphome/test_entity.py @@ -13,7 +13,13 @@ from aioesphomeapi import ( UserService, ) -from homeassistant.const import ATTR_RESTORED, STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ( + ATTR_RESTORED, + EVENT_HOMEASSISTANT_STOP, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from .conftest import MockESPHomeDevice @@ -231,6 +237,19 @@ async def test_deep_sleep_device( assert state is not None assert state.state == STATE_UNAVAILABLE + await mock_device.mock_connect() + await hass.async_block_till_done() + state = hass.states.get("binary_sensor.test_mybinary_sensor") + assert state is not None + assert state.state == STATE_ON + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + # Verify we do not dispatch any more state updates or + # availability updates after the stop event is fired + state = hass.states.get("binary_sensor.test_mybinary_sensor") + assert state is not None + assert state.state == STATE_ON + async def test_esphome_device_without_friendly_name( hass: HomeAssistant, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index d297dddee4a..244e7487ed3 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1,6 +1,6 @@ """Test ESPHome manager.""" from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, call from aioesphomeapi import APIClient, DeviceInfo, EntityInfo, EntityState, UserService import pytest @@ -16,6 +16,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import issue_registry as ir +from 
homeassistant.setup import async_setup_component from .conftest import MockESPHomeDevice @@ -332,3 +333,39 @@ async def test_connection_aborted_wrong_device( await hass.async_block_till_done() assert len(new_info.mock_calls) == 1 assert "Unexpected device found at" not in caplog.text + + +async def test_debug_logging( + mock_client: APIClient, + hass: HomeAssistant, + mock_generic_device_entry: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockConfigEntry], + ], +) -> None: + """Test enabling and disabling debug logging.""" + assert await async_setup_component(hass, "logger", {"logger": {}}) + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + await hass.services.async_call( + "logger", + "set_level", + {"homeassistant.components.esphome": "DEBUG"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_client.set_debug.assert_has_calls([call(True)]) + + mock_client.reset_mock() + await hass.services.async_call( + "logger", + "set_level", + {"homeassistant.components.esphome": "WARNING"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_client.set_debug.assert_has_calls([call(False)]) diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index d7b04f8448c..9ab00421cbc 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -100,7 +100,8 @@ async def test_update_entity( ) as mock_compile, patch( "esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=True ) as mock_upload, pytest.raises( - HomeAssistantError, match="compiling" + HomeAssistantError, + match="compiling", ): await hass.services.async_call( "update", @@ -120,7 +121,8 @@ async def test_update_entity( ) as mock_compile, patch( "esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=False ) as mock_upload, pytest.raises( - HomeAssistantError, match="OTA" + 
HomeAssistantError, + match="OTA", ): await hass.services.async_call( "update", diff --git a/tests/components/esphome/test_voice_assistant.py b/tests/components/esphome/test_voice_assistant.py index ca74c99f0cd..38a33bfdec2 100644 --- a/tests/components/esphome/test_voice_assistant.py +++ b/tests/components/esphome/test_voice_assistant.py @@ -337,6 +337,28 @@ async def test_send_tts_called( mock_send_tts.assert_called_with(_TEST_MEDIA_ID) +async def test_send_tts_not_called_when_empty( + hass: HomeAssistant, + voice_assistant_udp_server_v1: VoiceAssistantUDPServer, + voice_assistant_udp_server_v2: VoiceAssistantUDPServer, +) -> None: + """Test the UDP server with a v1/v2 device doesn't call _send_tts when the output is empty.""" + with patch( + "homeassistant.components.esphome.voice_assistant.VoiceAssistantUDPServer._send_tts" + ) as mock_send_tts: + voice_assistant_udp_server_v1._event_callback( + PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) + ) + + mock_send_tts.assert_not_called() + + voice_assistant_udp_server_v2._event_callback( + PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) + ) + + mock_send_tts.assert_not_called() + + async def test_send_tts( hass: HomeAssistant, voice_assistant_udp_server_v2: VoiceAssistantUDPServer, diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index 66dd8979d67..a4f10fe97c4 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -51,7 +51,8 @@ async def setup_evil_genius_labs( "pyevilgenius.EvilGeniusDevice.get_product", return_value=product_fixture, ), patch( - "homeassistant.components.evil_genius_labs.PLATFORMS", platforms + "homeassistant.components.evil_genius_labs.PLATFORMS", + platforms, ): assert await async_setup_component(hass, "evil_genius_labs", {}) await hass.async_block_till_done() diff --git a/tests/components/fan/test_init.py 
b/tests/components/fan/test_init.py index 8338afc9c68..ec421141768 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,8 +1,19 @@ """Tests for fan platforms.""" import pytest -from homeassistant.components.fan import FanEntity +from homeassistant.components.fan import ( + ATTR_PRESET_MODE, + ATTR_PRESET_MODES, + DOMAIN, + SERVICE_SET_PRESET_MODE, + FanEntity, + NotValidPresetModeError, +) from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er +from homeassistant.setup import async_setup_component + +from tests.testing_config.custom_components.test.fan import MockFan class BaseFan(FanEntity): @@ -82,3 +93,55 @@ def test_fanentity_attributes(attribute_name, attribute_value) -> None: fan = BaseFan() setattr(fan, f"_attr_{attribute_name}", attribute_value) assert getattr(fan, attribute_name) == attribute_value + + +async def test_preset_mode_validation( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + entity_registry: er.EntityRegistry, + enable_custom_integrations: None, +) -> None: + """Test preset mode validation.""" + + await hass.async_block_till_done() + + platform = getattr(hass.components, "test.fan") + platform.init(empty=False) + + assert await async_setup_component(hass, "fan", {"fan": {"platform": "test"}}) + await hass.async_block_till_done() + + test_fan: MockFan = platform.ENTITIES["support_preset_mode"] + await hass.async_block_till_done() + + state = hass.states.get("fan.support_fan_with_preset_mode_support") + assert state.attributes.get(ATTR_PRESET_MODES) == ["auto", "eco"] + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_PRESET_MODE, + { + "entity_id": "fan.support_fan_with_preset_mode_support", + "preset_mode": "eco", + }, + blocking=True, + ) + + state = hass.states.get("fan.support_fan_with_preset_mode_support") + assert state.attributes.get(ATTR_PRESET_MODE) == "eco" + + with pytest.raises(NotValidPresetModeError) as exc: + await 
hass.services.async_call( + DOMAIN, + SERVICE_SET_PRESET_MODE, + { + "entity_id": "fan.support_fan_with_preset_mode_support", + "preset_mode": "invalid", + }, + blocking=True, + ) + assert exc.value.translation_key == "not_valid_preset_mode" + + with pytest.raises(NotValidPresetModeError) as exc: + await test_fan._valid_preset_mode_or_raise("invalid") + assert exc.value.translation_key == "not_valid_preset_mode" diff --git a/tests/components/fastdotcom/__init__.py b/tests/components/fastdotcom/__init__.py new file mode 100644 index 00000000000..4c2ca6301af --- /dev/null +++ b/tests/components/fastdotcom/__init__.py @@ -0,0 +1 @@ +"""Fast.com integration tests.""" diff --git a/tests/components/fastdotcom/test_config_flow.py b/tests/components/fastdotcom/test_config_flow.py new file mode 100644 index 00000000000..4314a7688d8 --- /dev/null +++ b/tests/components/fastdotcom/test_config_flow.py @@ -0,0 +1,74 @@ +"""Test for the Fast.com config flow.""" +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.fastdotcom.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_form(hass: HomeAssistant) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.fastdotcom.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "Fast.com" + assert result["data"] == {} + assert 
result["options"] == {} + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize("source", [SOURCE_USER, SOURCE_IMPORT]) +async def test_single_instance_allowed( + hass: HomeAssistant, + source: str, +) -> None: + """Test we abort if already setup.""" + mock_config_entry = MockConfigEntry(domain=DOMAIN) + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": source} + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + +async def test_import_flow_success(hass: HomeAssistant) -> None: + """Test import flow.""" + with patch( + "homeassistant.components.fastdotcom.__init__.SpeedtestData", + return_value={"download": "50"}, + ), patch("homeassistant.components.fastdotcom.sensor.SpeedtestSensor"): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "Fast.com" + assert result["data"] == {} + assert result["options"] == {} diff --git a/tests/components/flexit_bacnet/__init__.py b/tests/components/flexit_bacnet/__init__.py new file mode 100644 index 00000000000..4cae6e4f4bf --- /dev/null +++ b/tests/components/flexit_bacnet/__init__.py @@ -0,0 +1 @@ +"""Tests for the Flexit Nordic (BACnet) integration.""" diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py new file mode 100644 index 00000000000..b136b134e01 --- /dev/null +++ b/tests/components/flexit_bacnet/conftest.py @@ -0,0 +1,44 @@ +"""Configuration for Flexit Nordic (BACnet) tests.""" +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.flexit_bacnet.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import 
FlowResultType + + +@pytest.fixture +async def flow_id(hass: HomeAssistant) -> str: + """Return initial ID for user-initiated configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + + return result["flow_id"] + + +@pytest.fixture(autouse=True) +def mock_serial_number_and_device_name(): + """Mock serial number of the device.""" + with patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.serial_number", + "0000-0001", + ), patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.device_name", + "Device Name", + ): + yield + + +@pytest.fixture +def mock_setup_entry(): + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.flexit_bacnet.async_setup_entry", return_value=True + ) as setup_entry_mock: + yield setup_entry_mock diff --git a/tests/components/flexit_bacnet/test_config_flow.py b/tests/components/flexit_bacnet/test_config_flow.py new file mode 100644 index 00000000000..ed513587af6 --- /dev/null +++ b/tests/components/flexit_bacnet/test_config_flow.py @@ -0,0 +1,120 @@ +"""Test the Flexit Nordic (BACnet) config flow.""" +import asyncio.exceptions +from unittest.mock import patch + +from flexit_bacnet import DecodingError +import pytest + +from homeassistant.components.flexit_bacnet.const import DOMAIN +from homeassistant.const import CONF_DEVICE_ID, CONF_IP_ADDRESS +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant, flow_id: str, mock_setup_entry) -> None: + """Test we get the form and the happy path works.""" + with patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.update" + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + CONF_IP_ADDRESS: 
"1.1.1.1", + CONF_DEVICE_ID: 2, + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "Device Name" + assert result["context"]["unique_id"] == "0000-0001" + assert result["data"] == { + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + } + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("error", "message"), + [ + ( + asyncio.exceptions.TimeoutError, + "cannot_connect", + ), + (ConnectionError, "cannot_connect"), + (DecodingError, "cannot_connect"), + (Exception(), "unknown"), + ], +) +async def test_flow_fails( + hass: HomeAssistant, flow_id: str, error: Exception, message: str, mock_setup_entry +) -> None: + """Test that we return 'cannot_connect' error when attempting to connect to an incorrect IP address. + + The flexit_bacnet library raises asyncio.exceptions.TimeoutError in that scenario. + """ + with patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.update", + side_effect=error, + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + }, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": message} + assert len(mock_setup_entry.mock_calls) == 0 + + # ensure that user can recover from this error + with patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.update" + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + }, + ) + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["title"] == "Device Name" + assert result2["context"]["unique_id"] == "0000-0001" + assert result2["data"] == { + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_device_already_exist(hass: HomeAssistant, flow_id: str) -> None: + """Test that we 
cannot add already added device.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + }, + unique_id="0000-0001", + ) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.flexit_bacnet.config_flow.FlexitBACnet.update" + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + CONF_IP_ADDRESS: "1.1.1.1", + CONF_DEVICE_ID: 2, + }, + ) + await hass.async_block_till_done() + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/freebox/conftest.py b/tests/components/freebox/conftest.py index 8a6590d1105..3ba175cbc75 100644 --- a/tests/components/freebox/conftest.py +++ b/tests/components/freebox/conftest.py @@ -1,6 +1,8 @@ """Test helpers for Freebox.""" +import json from unittest.mock import AsyncMock, PropertyMock, patch +from freebox_api.exceptions import HttpRequestError import pytest from homeassistant.core import HomeAssistant @@ -10,12 +12,14 @@ from .const import ( DATA_CALL_GET_CALLS_LOG, DATA_CONNECTION_GET_STATUS, DATA_HOME_GET_NODES, - DATA_HOME_PIR_GET_VALUES, + DATA_HOME_PIR_GET_VALUE, + DATA_HOME_SET_VALUE, DATA_LAN_GET_HOSTS_LIST, + DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE, DATA_STORAGE_GET_DISKS, DATA_STORAGE_GET_RAIDS, DATA_SYSTEM_GET_CONFIG, - WIFI_GET_GLOBAL_CONFIG, + DATA_WIFI_GET_GLOBAL_CONFIG, ) from tests.common import MockConfigEntry @@ -41,7 +45,9 @@ def enable_all_entities(): @pytest.fixture -def mock_device_registry_devices(hass: HomeAssistant, device_registry): +def mock_device_registry_devices( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +): """Create device registry devices so the device tracker entities are enabled.""" config_entry = MockConfigEntry(domain="something_else") config_entry.add_to_hass(hass) @@ -79,11 +85,30 @@ def mock_router(mock_device_registry_devices): return_value=DATA_CONNECTION_GET_STATUS ) # switch - instance.wifi.get_global_config = 
AsyncMock(return_value=WIFI_GET_GLOBAL_CONFIG) + instance.wifi.get_global_config = AsyncMock( + return_value=DATA_WIFI_GET_GLOBAL_CONFIG + ) # home devices instance.home.get_home_nodes = AsyncMock(return_value=DATA_HOME_GET_NODES) instance.home.get_home_endpoint_value = AsyncMock( - return_value=DATA_HOME_PIR_GET_VALUES + return_value=DATA_HOME_PIR_GET_VALUE + ) + instance.home.set_home_endpoint_value = AsyncMock( + return_value=DATA_HOME_SET_VALUE ) instance.close = AsyncMock() yield service_mock + + +@pytest.fixture(name="router_bridge_mode") +def mock_router_bridge_mode(mock_device_registry_devices, router): + """Mock a successful connection to Freebox Bridge mode.""" + + router().lan.get_hosts_list = AsyncMock( + side_effect=HttpRequestError( + "Request failed (APIResponse: %s)" + % json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE) + ) + ) + + return router diff --git a/tests/components/freebox/const.py b/tests/components/freebox/const.py index a7dd3132719..ae07b39c5e8 100644 --- a/tests/components/freebox/const.py +++ b/tests/components/freebox/const.py @@ -21,11 +21,15 @@ DATA_STORAGE_GET_DISKS = load_json_array_fixture("freebox/storage_get_disks.json DATA_STORAGE_GET_RAIDS = load_json_array_fixture("freebox/storage_get_raids.json") # switch -WIFI_GET_GLOBAL_CONFIG = load_json_object_fixture("freebox/wifi_get_global_config.json") +DATA_WIFI_GET_GLOBAL_CONFIG = load_json_object_fixture( + "freebox/wifi_get_global_config.json" +) # device_tracker DATA_LAN_GET_HOSTS_LIST = load_json_array_fixture("freebox/lan_get_hosts_list.json") - +DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE = load_json_object_fixture( + "freebox/lan_get_hosts_list_bridge.json" +) # Home # ALL @@ -33,10 +37,14 @@ DATA_HOME_GET_NODES = load_json_array_fixture("freebox/home_get_nodes.json") # Home # PIR node id 26, endpoint id 6 -DATA_HOME_PIR_GET_VALUES = load_json_object_fixture("freebox/home_pir_get_values.json") +DATA_HOME_PIR_GET_VALUE = load_json_object_fixture("freebox/home_pir_get_value.json") # 
Home # ALARM node id 7, endpoint id 11 -DATA_HOME_ALARM_GET_VALUES = load_json_object_fixture( - "freebox/home_alarm_get_values.json" +DATA_HOME_ALARM_GET_VALUE = load_json_object_fixture( + "freebox/home_alarm_get_value.json" ) + +# Home +# Set a node value with success +DATA_HOME_SET_VALUE = load_json_object_fixture("freebox/home_set_value.json") diff --git a/tests/components/freebox/fixtures/home_alarm_get_values.json b/tests/components/freebox/fixtures/home_alarm_get_value.json similarity index 64% rename from tests/components/freebox/fixtures/home_alarm_get_values.json rename to tests/components/freebox/fixtures/home_alarm_get_value.json index 1e43a428296..6e4ad4d0538 100644 --- a/tests/components/freebox/fixtures/home_alarm_get_values.json +++ b/tests/components/freebox/fixtures/home_alarm_get_value.json @@ -1,5 +1,5 @@ { "refresh": 2000, - "value": "alarm2_armed", + "value": "alarm1_armed", "value_type": "string" } diff --git a/tests/components/freebox/fixtures/home_pir_get_values.json b/tests/components/freebox/fixtures/home_pir_get_value.json similarity index 100% rename from tests/components/freebox/fixtures/home_pir_get_values.json rename to tests/components/freebox/fixtures/home_pir_get_value.json diff --git a/tests/components/freebox/fixtures/home_set_value.json b/tests/components/freebox/fixtures/home_set_value.json new file mode 100644 index 00000000000..5550c6db40a --- /dev/null +++ b/tests/components/freebox/fixtures/home_set_value.json @@ -0,0 +1,3 @@ +{ + "success": true +} diff --git a/tests/components/freebox/fixtures/lan_get_hosts_list_bridge.json b/tests/components/freebox/fixtures/lan_get_hosts_list_bridge.json new file mode 100644 index 00000000000..4afda465712 --- /dev/null +++ b/tests/components/freebox/fixtures/lan_get_hosts_list_bridge.json @@ -0,0 +1,5 @@ +{ + "msg": "Erreur lors de la récupération de la liste des hôtes : Interface invalide", + "success": false, + "error_code": "nodev" +} diff --git 
a/tests/components/freebox/test_alarm_control_panel.py b/tests/components/freebox/test_alarm_control_panel.py index d24c747f2a3..44286f18b87 100644 --- a/tests/components/freebox/test_alarm_control_panel.py +++ b/tests/components/freebox/test_alarm_control_panel.py @@ -1,57 +1,68 @@ -"""Tests for the Freebox sensors.""" +"""Tests for the Freebox alarms.""" from copy import deepcopy from unittest.mock import Mock from freezegun.api import FrozenDateTimeFactory -import pytest from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_CONTROL_PANEL, + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, ) from homeassistant.components.freebox import SCAN_INTERVAL from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_ALARM_ARM_AWAY, - SERVICE_ALARM_ARM_CUSTOM_BYPASS, SERVICE_ALARM_ARM_HOME, - SERVICE_ALARM_ARM_NIGHT, - SERVICE_ALARM_ARM_VACATION, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, + STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant, State -from homeassistant.helpers.state import async_reproduce_state +from homeassistant.core import HomeAssistant from .common import setup_platform -from .const import DATA_HOME_ALARM_GET_VALUES +from .const import DATA_HOME_ALARM_GET_VALUE, DATA_HOME_GET_NODES -from tests.common import async_fire_time_changed, async_mock_service +from tests.common import async_fire_time_changed -async def test_panel( +async def test_alarm_changed_from_external( hass: HomeAssistant, freezer: FrozenDateTimeFactory, router: Mock ) -> None: - """Test home binary sensors.""" - await setup_platform(hass, ALARM_CONTROL_PANEL) + """Test Freebox Home alarm which state depends on external changes.""" + data_get_home_nodes = deepcopy(DATA_HOME_GET_NODES) + data_get_home_endpoint_value = 
deepcopy(DATA_HOME_ALARM_GET_VALUE) + + # Add remove arm_home feature + ALARM_NODE_ID = 7 + ALARM_HOME_ENDPOINT_ID = 2 + del data_get_home_nodes[ALARM_NODE_ID]["type"]["endpoints"][ALARM_HOME_ENDPOINT_ID] + router().home.get_home_nodes.return_value = data_get_home_nodes + + data_get_home_endpoint_value["value"] = "alarm1_arming" + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + + await setup_platform(hass, ALARM_CONTROL_PANEL_DOMAIN) + + # Attributes + assert hass.states.get("alarm_control_panel.systeme_d_alarme").attributes[ + "supported_features" + ] == ( + AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.TRIGGER + ) # Initial state - assert hass.states.get("alarm_control_panel.systeme_d_alarme").state == "unknown" assert ( - hass.states.get("alarm_control_panel.systeme_d_alarme").attributes[ - "supported_features" - ] - == AlarmControlPanelEntityFeature.ARM_AWAY + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_ARMING ) # Now simulate a changed status - data_get_home_endpoint_value = deepcopy(DATA_HOME_ALARM_GET_VALUES) + data_get_home_endpoint_value["value"] = "alarm1_armed" router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value # Simulate an update @@ -60,64 +71,105 @@ async def test_panel( await hass.async_block_till_done() assert ( - hass.states.get("alarm_control_panel.systeme_d_alarme").state == "armed_night" - ) - # Fake that the entity is triggered. 
- hass.states.async_set("alarm_control_panel.systeme_d_alarme", STATE_ALARM_DISARMED) - assert hass.states.get("alarm_control_panel.systeme_d_alarme").state == "disarmed" - - -async def test_reproducing_states( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test reproducing Alarm control panel states.""" - hass.states.async_set( - "alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, - {}, - ) - hass.states.async_set( - "alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED, {} - ) - hass.states.async_set( - "alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED, {} + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_ARMED_AWAY ) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_ARM_AWAY) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_ARM_CUSTOM_BYPASS) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_ARM_HOME) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_ARM_NIGHT) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_ARM_VACATION) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_DISARM) - async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_TRIGGER) - # These calls should do nothing as entities already in desired state - await async_reproduce_state( - hass, - [ - State("alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY), - State( - "alarm_control_panel.entity_armed_custom_bypass", - STATE_ALARM_ARMED_CUSTOM_BYPASS, - ), - 
State("alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME), - State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT), - State( - "alarm_control_panel.entity_armed_vacation", STATE_ALARM_ARMED_VACATION - ), - State("alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED), - State("alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED), - ], +async def test_alarm_changed_from_hass(hass: HomeAssistant, router: Mock) -> None: + """Test Freebox Home alarm which state depends on HA.""" + data_get_home_endpoint_value = deepcopy(DATA_HOME_ALARM_GET_VALUE) + + data_get_home_endpoint_value["value"] = "alarm1_armed" + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + + await setup_platform(hass, ALARM_CONTROL_PANEL_DOMAIN) + + # Attributes + assert hass.states.get("alarm_control_panel.systeme_d_alarme").attributes[ + "supported_features" + ] == ( + AlarmControlPanelEntityFeature.ARM_AWAY + | AlarmControlPanelEntityFeature.ARM_HOME + | AlarmControlPanelEntityFeature.TRIGGER + ) + + # Initial state: arm_away + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_ARMED_AWAY + ) + + # Now call for a change -> disarmed + data_get_home_endpoint_value["value"] = "idle" + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: ["alarm_control_panel.systeme_d_alarme"]}, + blocking=True, + ) + + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_DISARMED + ) + + # Now call for a change -> arm_away + data_get_home_endpoint_value["value"] = "alarm1_arming" + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: ["alarm_control_panel.systeme_d_alarme"]}, + blocking=True, + ) 
+ + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_ARMING + ) + + # Now call for a change -> arm_home + data_get_home_endpoint_value["value"] = "alarm2_armed" + # in reality: alarm2_arming then alarm2_armed + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: ["alarm_control_panel.systeme_d_alarme"]}, + blocking=True, + ) + + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_ARMED_HOME + ) + + # Now call for a change -> trigger + data_get_home_endpoint_value["value"] = "alarm1_alert_timer" + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_TRIGGER, + {ATTR_ENTITY_ID: ["alarm_control_panel.systeme_d_alarme"]}, + blocking=True, + ) + + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state + == STATE_ALARM_TRIGGERED + ) + + +async def test_alarm_undefined_fetch_status(hass: HomeAssistant, router: Mock) -> None: + """Test Freebox Home alarm which state is undefined or null.""" + data_get_home_endpoint_value = deepcopy(DATA_HOME_ALARM_GET_VALUE) + data_get_home_endpoint_value["value"] = None + router().home.get_home_endpoint_value.return_value = data_get_home_endpoint_value + + await setup_platform(hass, ALARM_CONTROL_PANEL_DOMAIN) + + assert ( + hass.states.get("alarm_control_panel.systeme_d_alarme").state == STATE_UNKNOWN ) diff --git a/tests/components/freebox/test_binary_sensor.py b/tests/components/freebox/test_binary_sensor.py index 2fd308ea667..ee07af786be 100644 --- a/tests/components/freebox/test_binary_sensor.py +++ b/tests/components/freebox/test_binary_sensor.py @@ -1,4 +1,4 @@ -"""Tests for the Freebox sensors.""" +"""Tests for the Freebox binary sensors.""" from copy import deepcopy from unittest.mock import 
Mock @@ -13,7 +13,7 @@ from homeassistant.const import ATTR_DEVICE_CLASS from homeassistant.core import HomeAssistant from .common import setup_platform -from .const import DATA_HOME_PIR_GET_VALUES, DATA_STORAGE_GET_RAIDS +from .const import DATA_HOME_PIR_GET_VALUE, DATA_STORAGE_GET_RAIDS from tests.common import async_fire_time_changed @@ -73,7 +73,7 @@ async def test_home( assert hass.states.get("binary_sensor.ouverture_porte_couvercle").state == "off" # Now simulate a changed status - data_home_get_values_changed = deepcopy(DATA_HOME_PIR_GET_VALUES) + data_home_get_values_changed = deepcopy(DATA_HOME_PIR_GET_VALUE) data_home_get_values_changed["value"] = True router().home.get_home_endpoint_value.return_value = data_home_get_values_changed diff --git a/tests/components/freebox/test_button.py b/tests/components/freebox/test_button.py index 5f72b5968f1..209ab1e9fc2 100644 --- a/tests/components/freebox/test_button.py +++ b/tests/components/freebox/test_button.py @@ -1,4 +1,4 @@ -"""Tests for the Freebox config flow.""" +"""Tests for the Freebox buttons.""" from unittest.mock import ANY, AsyncMock, Mock, patch from pytest_unordered import unordered diff --git a/tests/components/freebox/test_device_tracker.py b/tests/components/freebox/test_device_tracker.py new file mode 100644 index 00000000000..6d4ca5fb7ee --- /dev/null +++ b/tests/components/freebox/test_device_tracker.py @@ -0,0 +1,49 @@ +"""Tests for the Freebox device trackers.""" +from unittest.mock import Mock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN +from homeassistant.components.freebox import SCAN_INTERVAL +from homeassistant.core import HomeAssistant + +from .common import setup_platform + +from tests.common import async_fire_time_changed + + +async def test_router_mode( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + router: Mock, +) -> None: + """Test get_hosts_list invoqued multiple times if 
freebox into router mode.""" + await setup_platform(hass, DEVICE_TRACKER_DOMAIN) + + assert router().lan.get_hosts_list.call_count == 1 + + # Simulate an update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert router().lan.get_hosts_list.call_count == 2 + + +async def test_bridge_mode( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + router_bridge_mode: Mock, +) -> None: + """Test get_hosts_list invoqued once if freebox into bridge mode.""" + await setup_platform(hass, DEVICE_TRACKER_DOMAIN) + + assert router_bridge_mode().lan.get_hosts_list.call_count == 1 + + # Simulate an update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # If get_hosts_list failed, not called again + assert router_bridge_mode().lan.get_hosts_list.call_count == 1 diff --git a/tests/components/freebox/test_init.py b/tests/components/freebox/test_init.py index 85acfdccc4d..9064727fb7f 100644 --- a/tests/components/freebox/test_init.py +++ b/tests/components/freebox/test_init.py @@ -1,4 +1,4 @@ -"""Tests for the Freebox config flow.""" +"""Tests for the Freebox init.""" from unittest.mock import ANY, Mock, patch from pytest_unordered import unordered diff --git a/tests/components/freebox/test_router.py b/tests/components/freebox/test_router.py new file mode 100644 index 00000000000..572c168e665 --- /dev/null +++ b/tests/components/freebox/test_router.py @@ -0,0 +1,22 @@ +"""Tests for the Freebox utility methods.""" +import json + +from homeassistant.components.freebox.router import is_json + +from .const import DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE, DATA_WIFI_GET_GLOBAL_CONFIG + + +async def test_is_json() -> None: + """Test is_json method.""" + + # Valid JSON values + assert is_json("{}") + assert is_json('{ "simple":"json" }') + assert is_json(json.dumps(DATA_WIFI_GET_GLOBAL_CONFIG)) + assert is_json(json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE)) + + # Not valid JSON 
values + assert not is_json(None) + assert not is_json("") + assert not is_json("XXX") + assert not is_json("{XXX}") diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index bb34af7c400..ded7cda0dea 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -48,9 +48,9 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N ), patch( "homeassistant.components.fritz.async_setup_entry" ) as mock_setup_entry, patch( - "requests.get" + "requests.get", ) as mock_request_get, patch( - "requests.post" + "requests.post", ) as mock_request_post, patch( "homeassistant.components.fritz.config_flow.socket.gethostbyname", return_value=MOCK_IPS["fritz.box"], @@ -98,9 +98,9 @@ async def test_user_already_configured( "homeassistant.components.fritz.common.FritzBoxTools._update_device_info", return_value=MOCK_FIRMWARE_INFO, ), patch( - "requests.get" + "requests.get", ) as mock_request_get, patch( - "requests.post" + "requests.post", ) as mock_request_post, patch( "homeassistant.components.fritz.config_flow.socket.gethostbyname", return_value=MOCK_IPS["fritz.box"], @@ -211,11 +211,11 @@ async def test_reauth_successful( "homeassistant.components.fritz.common.FritzBoxTools._update_device_info", return_value=MOCK_FIRMWARE_INFO, ), patch( - "homeassistant.components.fritz.async_setup_entry" + "homeassistant.components.fritz.async_setup_entry", ) as mock_setup_entry, patch( - "requests.get" + "requests.get", ) as mock_request_get, patch( - "requests.post" + "requests.post", ) as mock_request_post: mock_request_get.return_value.status_code = 200 mock_request_get.return_value.content = MOCK_REQUEST @@ -399,9 +399,7 @@ async def test_ssdp(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N return_value=MOCK_FIRMWARE_INFO, ), patch( "homeassistant.components.fritz.async_setup_entry" - ) as mock_setup_entry, patch( - "requests.get" - ) as mock_request_get, 
patch( + ) as mock_setup_entry, patch("requests.get") as mock_request_get, patch( "requests.post" ) as mock_request_post: mock_request_get.return_value.status_code = 200 diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 15ff04f3720..1faf37c84ee 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -45,6 +45,17 @@ async def setup_config_entry( return result +def set_devices( + fritz: Mock, devices: list[Mock] | None = None, templates: list[Mock] | None = None +) -> None: + """Set list of devices or templates.""" + if devices is not None: + fritz().get_devices.return_value = devices + + if templates is not None: + fritz().get_templates.return_value = templates + + class FritzEntityBaseMock(Mock): """base mock of a AVM Fritz!Box binary sensor device.""" diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index ac6b702147a..983516bb9c0 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -21,7 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from . import FritzDeviceBinarySensorMock, setup_config_entry +from . 
import FritzDeviceBinarySensorMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -126,3 +126,26 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().update_devices.call_count == 2 assert fritz().login.call_count == 1 + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceBinarySensorMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(f"{ENTITY_ID}_alarm") + assert state + + new_device = FritzDeviceBinarySensorMock() + new_device.ain = "7890 1234" + new_device.name = "new_device" + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_device_alarm") + assert state diff --git a/tests/components/fritzbox/test_button.py b/tests/components/fritzbox/test_button.py index 9c53c895f5d..8c0bbec573e 100644 --- a/tests/components/fritzbox/test_button.py +++ b/tests/components/fritzbox/test_button.py @@ -1,4 +1,5 @@ """Tests for AVM Fritz!Box templates.""" +from datetime import timedelta from unittest.mock import Mock from homeassistant.components.button import DOMAIN, SERVICE_PRESS @@ -10,10 +11,13 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util -from . import FritzEntityBaseMock, setup_config_entry +from . 
import FritzEntityBaseMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG +from tests.common import async_fire_time_changed + ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" @@ -41,3 +45,26 @@ async def test_apply_template(hass: HomeAssistant, fritz: Mock) -> None: DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert fritz().apply_template.call_count == 1 + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + template = FritzEntityBaseMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], fritz=fritz, template=template + ) + + state = hass.states.get(ENTITY_ID) + assert state + + new_template = FritzEntityBaseMock() + new_template.ain = "7890 1234" + new_template.name = "new_template" + set_devices(fritz, templates=[template, new_template]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_template") + assert state diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index d49b5710a12..a14c53d6529 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -41,7 +41,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from . import FritzDeviceClimateMock, setup_config_entry +from . 
import FritzDeviceClimateMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -402,3 +402,26 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().update_devices.call_count == 3 assert state assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceClimateMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + + new_device = FritzDeviceClimateMock() + new_device.ain = "7890 1234" + new_device.name = "new_climate" + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_climate") + assert state diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index af725ce93da..e3a6d786abf 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -1,4 +1,5 @@ """Tests for AVM Fritz!Box switch component.""" +from datetime import timedelta from unittest.mock import Mock, call from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN @@ -12,10 +13,13 @@ from homeassistant.const import ( SERVICE_STOP_COVER, ) from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util -from . import FritzDeviceCoverMock, setup_config_entry +from . 
import FritzDeviceCoverMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG +from tests.common import async_fire_time_changed + ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" @@ -84,3 +88,26 @@ async def test_stop_cover(hass: HomeAssistant, fritz: Mock) -> None: DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_stop.call_count == 1 + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceCoverMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + + new_device = FritzDeviceCoverMock() + new_device.ain = "7890 1234" + new_device.name = "new_climate" + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_climate") + assert state diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py index 5c8d30772f0..b8273204325 100644 --- a/tests/components/fritzbox/test_init.py +++ b/tests/components/fritzbox/test_init.py @@ -296,7 +296,7 @@ async def test_remove_device( ) response = await ws_client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" await hass.async_block_till_done() # try to delete orphan_device diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 5511b93ac3f..858b564cd18 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -29,7 +29,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util 
-from . import FritzDeviceLightMock, setup_config_entry +from . import FritzDeviceLightMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -262,3 +262,38 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceLightMock() + device.get_color_temps.return_value = [2700, 6500] + device.get_colors.return_value = { + "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] + } + device.color_mode = COLOR_TEMP_MODE + device.color_temp = 2700 + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + + new_device = FritzDeviceLightMock() + new_device.ain = "7890 1234" + new_device.name = "new_light" + new_device.get_color_temps.return_value = [2700, 6500] + new_device.get_colors.return_value = { + "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] + } + new_device.color_mode = COLOR_TEMP_MODE + new_device.color_temp = 2700 + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_light") + assert state diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index b363d966c01..9fe25d02ed0 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from . 
import FritzDeviceSensorMock, setup_config_entry +from . import FritzDeviceSensorMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -108,3 +108,26 @@ async def test_update_error(hass: HomeAssistant, fritz: Mock) -> None: assert fritz().update_devices.call_count == 4 assert fritz().login.call_count == 4 + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceSensorMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(f"{ENTITY_ID}_temperature") + assert state + + new_device = FritzDeviceSensorMock() + new_device.ain = "7890 1234" + new_device.name = "new_device" + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_device_temperature") + assert state diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index 4ed1a88190a..aefe21e3ffc 100644 --- a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -31,7 +31,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from . import FritzDeviceSwitchMock, setup_config_entry +from . 
import FritzDeviceSwitchMock, set_devices, setup_config_entry from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -187,3 +187,26 @@ async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock) -> No state = hass.states.get(ENTITY_ID) assert state assert state.state == STATE_UNAVAILABLE + + +async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: + """Test adding new discovered devices during runtime.""" + device = FritzDeviceSwitchMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + + new_device = FritzDeviceSwitchMock() + new_device.ain = "7890 1234" + new_device.name = "new_switch" + set_devices(fritz, devices=[device, new_device]) + + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done() + + state = hass.states.get(f"{DOMAIN}.new_switch") + assert state diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index f94b0f3a55c..684e9a3ae5f 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -1,6 +1,6 @@ """Tests for the Fronius sensor platform.""" - from freezegun.api import FrozenDateTimeFactory +import pytest from homeassistant.components.fronius.const import DOMAIN from homeassistant.components.fronius.coordinator import ( @@ -33,33 +33,34 @@ async def test_symo_inverter( mock_responses(aioclient_mock, night=True) config_entry = await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 20 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 21 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusInverterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 
52 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 54 assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) assert_state("sensor.symo_20_total_energy", 44186900) assert_state("sensor.symo_20_energy_year", 25507686) assert_state("sensor.symo_20_dc_voltage", 16) + assert_state("sensor.symo_20_status_message", "startup") # Second test at daytime when inverter is producing mock_responses(aioclient_mock, night=False) freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 56 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusInverterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) assert_state("sensor.symo_20_energy_day", 1113) @@ -70,6 +71,7 @@ async def test_symo_inverter( assert_state("sensor.symo_20_frequency", 49.94) assert_state("sensor.symo_20_ac_power", 1190) assert_state("sensor.symo_20_ac_voltage", 227.90) + assert_state("sensor.symo_20_status_message", "running") # Third test at nighttime - additional AC entities default to 0 mock_responses(aioclient_mock, night=True) @@ -94,7 +96,7 @@ async def test_symo_logger( mock_responses(aioclient_mock) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 24 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 25 # states are rounded to 4 decimals assert_state("sensor.solarnet_grid_export_tariff", 0.078) assert_state("sensor.solarnet_co2_factor", 0.53) @@ -116,14 +118,14 @@ async def test_symo_meter( mock_responses(aioclient_mock) config_entry = await 
setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 24 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 25 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusMeterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 # states are rounded to 4 decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) assert_state("sensor.smart_meter_63a_current_phase_2", 6.68) @@ -157,6 +159,50 @@ async def test_symo_meter( assert_state("sensor.smart_meter_63a_voltage_phase_1_2", 395.9) assert_state("sensor.smart_meter_63a_voltage_phase_2_3", 398) assert_state("sensor.smart_meter_63a_voltage_phase_3_1", 398) + assert_state("sensor.smart_meter_63a_meter_location", 0) + assert_state("sensor.smart_meter_63a_meter_location_description", "feed_in") + + +@pytest.mark.parametrize( + ("location_code", "expected_code", "expected_description"), + [ + (-1, -1, "unknown"), + (3, 3, "external_generator"), + (4, 4, "external_battery"), + (7, 7, "unknown"), + (256, 256, "subload"), + (511, 511, "subload"), + (512, 512, "unknown"), + ], +) +async def test_symo_meter_forged( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + location_code: int | None, + expected_code: int | str, + expected_description: str, +) -> None: + """Tests for meter location codes we have no fixture for.""" + + def assert_state(entity_id, expected_state): + state = hass.states.get(entity_id) + assert state + assert state.state == str(expected_state) + + mock_responses( + aioclient_mock, + fixture_set="symo", + override_data={ + "symo/GetMeterRealtimeData.json": [ + (["Body", "Data", "0", "Meter_Location_Current"], location_code), + ], + }, + ) + await setup_fronius_integration(hass) + assert_state("sensor.smart_meter_63a_meter_location", expected_code) + assert_state( + 
"sensor.smart_meter_63a_meter_location_description", expected_description + ) async def test_symo_power_flow( @@ -175,14 +221,14 @@ async def test_symo_power_flow( mock_responses(aioclient_mock, night=True) config_entry = await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 20 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 21 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusInverterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 52 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 54 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) @@ -197,7 +243,7 @@ async def test_symo_power_flow( async_fire_time_changed(hass) await hass.async_block_till_done() # 54 because power_flow `rel_SelfConsumption` and `P_PV` is not `null` anymore - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 54 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 56 assert_state("sensor.solarnet_energy_day", 1101.7001) assert_state("sensor.solarnet_total_energy", 44188000) assert_state("sensor.solarnet_energy_year", 25508788) @@ -212,7 +258,7 @@ async def test_symo_power_flow( freezer.tick(FroniusPowerFlowUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 54 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 56 assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) assert_state("sensor.solarnet_energy_year", 25507686) @@ -238,18 +284,19 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert 
len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 23 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusMeterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 52 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 54 # inverter 1 assert_state("sensor.inverter_name_ac_current", 0.1589) assert_state("sensor.inverter_name_dc_current_2", 0.0754) assert_state("sensor.inverter_name_status_code", 7) + assert_state("sensor.inverter_name_status_message", "running") assert_state("sensor.inverter_name_dc_current", 0.0783) assert_state("sensor.inverter_name_dc_voltage_2", 403.4312) assert_state("sensor.inverter_name_ac_power", 37.3204) @@ -264,7 +311,8 @@ async def test_gen24( assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 2013105.0) assert_state("sensor.smart_meter_ts_65a_3_real_power", 653.1) assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0.0) + assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) + assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.828) assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 88221.0) assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 3863340.0) @@ -336,14 +384,14 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 34 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 35 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusMeterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 66 # inverter 1 
assert_state("sensor.gen24_storage_dc_current", 0.3952) assert_state("sensor.gen24_storage_dc_voltage_2", 318.8103) @@ -352,6 +400,7 @@ async def test_gen24_storage( assert_state("sensor.gen24_storage_ac_power", 250.9093) assert_state("sensor.gen24_storage_error_code", 0) assert_state("sensor.gen24_storage_status_code", 7) + assert_state("sensor.gen24_storage_status_message", "running") assert_state("sensor.gen24_storage_total_energy", 7512794.0117) assert_state("sensor.gen24_storage_inverter_state", "Running") assert_state("sensor.gen24_storage_dc_voltage", 419.1009) @@ -363,7 +412,8 @@ async def test_gen24_storage( assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.698) assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 1247204.0) assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0.0) + assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) + assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -501.5) assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 3266105.0) assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 19.6) @@ -396,7 +446,7 @@ async def test_gen24_storage( assert_state("sensor.ohmpilot_power", 0.0) assert_state("sensor.ohmpilot_temperature", 38.9) assert_state("sensor.ohmpilot_state_code", 0.0) - assert_state("sensor.ohmpilot_state_message", "Up and running") + assert_state("sensor.ohmpilot_state_message", "up_and_running") # power_flow assert_state("sensor.solarnet_power_grid", 2274.9) assert_state("sensor.solarnet_power_battery", 0.1591) @@ -463,14 +513,14 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 29 + assert 
len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 30 await enable_all_entities( hass, freezer, config_entry.entry_id, FroniusMeterUpdateCoordinator.default_interval, ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 40 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 43 # logger assert_state("sensor.solarnet_grid_export_tariff", 1) assert_state("sensor.solarnet_co2_factor", 0.53) @@ -483,6 +533,7 @@ async def test_primo_s0( assert_state("sensor.primo_5_0_1_error_code", 0) assert_state("sensor.primo_5_0_1_dc_current", 4.23) assert_state("sensor.primo_5_0_1_status_code", 7) + assert_state("sensor.primo_5_0_1_status_message", "running") assert_state("sensor.primo_5_0_1_energy_year", 7532755.5) assert_state("sensor.primo_5_0_1_ac_current", 3.85) assert_state("sensor.primo_5_0_1_ac_voltage", 223.9) @@ -497,6 +548,7 @@ async def test_primo_s0( assert_state("sensor.primo_3_0_1_error_code", 0) assert_state("sensor.primo_3_0_1_dc_current", 0.97) assert_state("sensor.primo_3_0_1_status_code", 7) + assert_state("sensor.primo_3_0_1_status_message", "running") assert_state("sensor.primo_3_0_1_energy_year", 3596193.25) assert_state("sensor.primo_3_0_1_ac_current", 1.32) assert_state("sensor.primo_3_0_1_ac_voltage", 223.6) @@ -505,6 +557,9 @@ async def test_primo_s0( assert_state("sensor.primo_3_0_1_led_state", 0) # meter assert_state("sensor.s0_meter_at_inverter_1_meter_location", 1) + assert_state( + "sensor.s0_meter_at_inverter_1_meter_location_description", "consumption_path" + ) assert_state("sensor.s0_meter_at_inverter_1_real_power", -2216.7487) # power_flow assert_state("sensor.solarnet_power_load", -2218.9349) diff --git a/tests/components/fully_kiosk/conftest.py b/tests/components/fully_kiosk/conftest.py index bed08b532fd..e409a0a3787 100644 --- a/tests/components/fully_kiosk/conftest.py +++ b/tests/components/fully_kiosk/conftest.py @@ -8,7 +8,13 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from 
homeassistant.components.fully_kiosk.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_SSL, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -24,6 +30,8 @@ def mock_config_entry() -> MockConfigEntry: CONF_HOST: "127.0.0.1", CONF_PASSWORD: "mocked-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: False, + CONF_VERIFY_SSL: False, }, unique_id="12345", ) diff --git a/tests/components/fully_kiosk/test_config_flow.py b/tests/components/fully_kiosk/test_config_flow.py index 566f3b6d292..018a62b5dc7 100644 --- a/tests/components/fully_kiosk/test_config_flow.py +++ b/tests/components/fully_kiosk/test_config_flow.py @@ -10,7 +10,13 @@ import pytest from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.config_entries import SOURCE_DHCP, SOURCE_MQTT, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_SSL, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.service_info.mqtt import MqttServiceInfo @@ -35,6 +41,8 @@ async def test_user_flow( { CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password", + CONF_SSL: False, + CONF_VERIFY_SSL: False, }, ) @@ -44,6 +52,8 @@ async def test_user_flow( CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: False, + CONF_VERIFY_SSL: False, } assert "result" in result2 assert result2["result"].unique_id == "12345" @@ -76,7 +86,13 @@ async def test_errors( mock_fully_kiosk_config_flow.getDeviceInfo.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: 
"1.1.1.1", CONF_PASSWORD: "test-password"} + flow_id, + user_input={ + CONF_HOST: "1.1.1.1", + CONF_PASSWORD: "test-password", + CONF_SSL: False, + CONF_VERIFY_SSL: False, + }, ) assert result2.get("type") == FlowResultType.FORM @@ -88,7 +104,13 @@ async def test_errors( mock_fully_kiosk_config_flow.getDeviceInfo.side_effect = None result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password"} + flow_id, + user_input={ + CONF_HOST: "1.1.1.1", + CONF_PASSWORD: "test-password", + CONF_SSL: True, + CONF_VERIFY_SSL: False, + }, ) assert result3.get("type") == FlowResultType.CREATE_ENTRY @@ -97,6 +119,8 @@ async def test_errors( CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: True, + CONF_VERIFY_SSL: False, } assert "result" in result3 assert result3["result"].unique_id == "12345" @@ -124,6 +148,8 @@ async def test_duplicate_updates_existing_entry( { CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password", + CONF_SSL: True, + CONF_VERIFY_SSL: True, }, ) @@ -133,6 +159,8 @@ async def test_duplicate_updates_existing_entry( CONF_HOST: "1.1.1.1", CONF_PASSWORD: "test-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: True, + CONF_VERIFY_SSL: True, } assert len(mock_fully_kiosk_config_flow.getDeviceInfo.mock_calls) == 1 @@ -161,6 +189,8 @@ async def test_dhcp_discovery_updates_entry( CONF_HOST: "127.0.0.2", CONF_PASSWORD: "mocked-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: False, + CONF_VERIFY_SSL: False, } @@ -212,6 +242,8 @@ async def test_mqtt_discovery_flow( result["flow_id"], { CONF_PASSWORD: "test-password", + CONF_SSL: False, + CONF_VERIFY_SSL: False, }, ) @@ -222,6 +254,8 @@ async def test_mqtt_discovery_flow( CONF_HOST: "192.168.1.234", CONF_PASSWORD: "test-password", CONF_MAC: "aa:bb:cc:dd:ee:ff", + CONF_SSL: False, + CONF_VERIFY_SSL: False, } assert "result" in confirmResult assert confirmResult["result"].unique_id == "12345" diff 
--git a/tests/components/fully_kiosk/test_init.py b/tests/components/fully_kiosk/test_init.py index 5c77b8a9d06..2e77cdb2f1d 100644 --- a/tests/components/fully_kiosk/test_init.py +++ b/tests/components/fully_kiosk/test_init.py @@ -9,7 +9,13 @@ import pytest from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.components.fully_kiosk.entity import valid_global_mac_address from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_SSL, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -92,6 +98,8 @@ async def test_multiple_kiosk_with_empty_mac( CONF_HOST: "127.0.0.1", CONF_PASSWORD: "mocked-password", CONF_MAC: "", + CONF_SSL: False, + CONF_VERIFY_SSL: False, }, unique_id="111111", ) @@ -105,6 +113,8 @@ async def test_multiple_kiosk_with_empty_mac( CONF_HOST: "127.0.0.2", CONF_PASSWORD: "mocked-password", CONF_MAC: "", + CONF_SSL: True, + CONF_VERIFY_SSL: False, }, unique_id="22222", ) diff --git a/tests/components/fully_kiosk/test_switch.py b/tests/components/fully_kiosk/test_switch.py index 4cbdad8d63a..20b5ed11998 100644 --- a/tests/components/fully_kiosk/test_switch.py +++ b/tests/components/fully_kiosk/test_switch.py @@ -7,7 +7,8 @@ from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.typing import MqttMockHAClient async def test_switches( @@ -86,6 +87,51 @@ async def test_switches( assert device_entry.sw_version == "1.42.5" +async def test_switches_mqtt_update( + hass: HomeAssistant, + mock_fully_kiosk: MagicMock, + mqtt_mock: MqttMockHAClient, + 
init_integration: MockConfigEntry, +) -> None: + """Test push updates over MQTT.""" + assert has_subscribed(mqtt_mock, "fully/event/onScreensaverStart/abcdef-123456") + assert has_subscribed(mqtt_mock, "fully/event/onScreensaverStop/abcdef-123456") + assert has_subscribed(mqtt_mock, "fully/event/screenOff/abcdef-123456") + assert has_subscribed(mqtt_mock, "fully/event/screenOn/abcdef-123456") + + entity = hass.states.get("switch.amazon_fire_screensaver") + assert entity + assert entity.state == "off" + + entity = hass.states.get("switch.amazon_fire_screen") + assert entity + assert entity.state == "on" + + async_fire_mqtt_message(hass, "fully/event/onScreensaverStart/abcdef-123456", "{}") + entity = hass.states.get("switch.amazon_fire_screensaver") + assert entity.state == "on" + + async_fire_mqtt_message(hass, "fully/event/onScreensaverStop/abcdef-123456", "{}") + entity = hass.states.get("switch.amazon_fire_screensaver") + assert entity.state == "off" + + async_fire_mqtt_message(hass, "fully/event/screenOff/abcdef-123456", "{}") + entity = hass.states.get("switch.amazon_fire_screen") + assert entity.state == "off" + + async_fire_mqtt_message(hass, "fully/event/screenOn/abcdef-123456", "{}") + entity = hass.states.get("switch.amazon_fire_screen") + assert entity.state == "on" + + +def has_subscribed(mqtt_mock: MqttMockHAClient, topic: str) -> bool: + """Check if MQTT topic has subscription.""" + for call in mqtt_mock.async_subscribe.call_args_list: + if call.args[0] == topic: + return True + return False + + def call_service(hass, service, entity_id): """Call any service on entity.""" return hass.services.async_call( diff --git a/tests/components/gdacs/test_sensor.py b/tests/components/gdacs/test_sensor.py index da318b1a94d..670d3efce51 100644 --- a/tests/components/gdacs/test_sensor.py +++ b/tests/components/gdacs/test_sensor.py @@ -72,10 +72,10 @@ async def test_setup(hass: HomeAssistant) -> None: == 4 ) - state = 
hass.states.get("sensor.gdacs_32_87336_117_22743") + state = hass.states.get("sensor.32_87336_117_22743") assert state is not None assert int(state.state) == 3 - assert state.name == "GDACS (32.87336, -117.22743)" + assert state.name == "32.87336, -117.22743" attributes = state.attributes assert attributes[ATTR_STATUS] == "OK" assert attributes[ATTR_CREATED] == 3 @@ -96,7 +96,7 @@ async def test_setup(hass: HomeAssistant) -> None: == 4 ) - state = hass.states.get("sensor.gdacs_32_87336_117_22743") + state = hass.states.get("sensor.32_87336_117_22743") attributes = state.attributes assert attributes[ATTR_CREATED] == 1 assert attributes[ATTR_UPDATED] == 2 @@ -125,6 +125,6 @@ async def test_setup(hass: HomeAssistant) -> None: == 1 ) - state = hass.states.get("sensor.gdacs_32_87336_117_22743") + state = hass.states.get("sensor.32_87336_117_22743") attributes = state.attributes assert attributes[ATTR_REMOVED] == 3 diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index aecfcbc29c1..8bfd0a66dd5 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -1,7 +1,6 @@ """The tests for generic camera component.""" import asyncio from http import HTTPStatus -import sys from unittest.mock import patch import aiohttp @@ -164,17 +163,10 @@ async def test_limit_refetch( hass.states.async_set("sensor.temp", "5") - # TODO: Remove version check with aiohttp 3.9.0 - if sys.version_info >= (3, 12): - with pytest.raises(aiohttp.ServerTimeoutError), patch( - "asyncio.timeout", side_effect=asyncio.TimeoutError() - ): - resp = await client.get("/api/camera_proxy/camera.config_test") - else: - with pytest.raises(aiohttp.ServerTimeoutError), patch( - "async_timeout.timeout", side_effect=asyncio.TimeoutError() - ): - resp = await client.get("/api/camera_proxy/camera.config_test") + with pytest.raises(aiohttp.ServerTimeoutError), patch( + "asyncio.timeout", side_effect=asyncio.TimeoutError() + ): + resp = 
await client.get("/api/camera_proxy/camera.config_test") assert respx.calls.call_count == 1 assert resp.status == HTTPStatus.OK diff --git a/tests/components/gios/__init__.py b/tests/components/gios/__init__.py index 946cceac786..4e69420f66e 100644 --- a/tests/components/gios/__init__.py +++ b/tests/components/gios/__init__.py @@ -43,7 +43,8 @@ async def init_integration( "homeassistant.components.gios.Gios._get_all_sensors", return_value=sensors, ), patch( - "homeassistant.components.gios.Gios._get_indexes", return_value=indexes + "homeassistant.components.gios.Gios._get_indexes", + return_value=indexes, ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/gios/test_config_flow.py b/tests/components/gios/test_config_flow.py index 3d52c122791..efe46be9b8d 100644 --- a/tests/components/gios/test_config_flow.py +++ b/tests/components/gios/test_config_flow.py @@ -55,7 +55,8 @@ async def test_invalid_sensor_data(hass: HomeAssistant) -> None: "homeassistant.components.gios.Gios._get_station", return_value=json.loads(load_fixture("gios/station.json")), ), patch( - "homeassistant.components.gios.Gios._get_sensor", return_value={} + "homeassistant.components.gios.Gios._get_sensor", + return_value={}, ): flow = config_flow.GiosFlowHandler() flow.hass = hass @@ -83,7 +84,8 @@ async def test_cannot_connect(hass: HomeAssistant) -> None: async def test_create_entry(hass: HomeAssistant) -> None: """Test that the user step works.""" with patch( - "homeassistant.components.gios.Gios._get_stations", return_value=STATIONS + "homeassistant.components.gios.Gios._get_stations", + return_value=STATIONS, ), patch( "homeassistant.components.gios.Gios._get_station", return_value=json.loads(load_fixture("gios/station.json")), diff --git a/tests/components/gios/test_init.py b/tests/components/gios/test_init.py index 0d4484c6d0d..d20aecad3df 100644 --- a/tests/components/gios/test_init.py +++ b/tests/components/gios/test_init.py @@ 
-82,9 +82,7 @@ async def test_migrate_device_and_config_entry( ), patch( "homeassistant.components.gios.Gios._get_all_sensors", return_value=sensors, - ), patch( - "homeassistant.components.gios.Gios._get_indexes", return_value=indexes - ): + ), patch("homeassistant.components.gios.Gios._get_indexes", return_value=indexes): config_entry.add_to_hass(hass) device_entry = device_registry.async_get_or_create( diff --git a/tests/components/google_assistant/test_helpers.py b/tests/components/google_assistant/test_helpers.py index 57915968933..aaa3949caaf 100644 --- a/tests/components/google_assistant/test_helpers.py +++ b/tests/components/google_assistant/test_helpers.py @@ -14,14 +14,17 @@ from homeassistant.components.google_assistant.const import ( SOURCE_LOCAL, STORE_GOOGLE_LOCAL_WEBHOOK_ID, ) +from homeassistant.components.matter.models import MatterDeviceInfo from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from . 
import MockConfig from tests.common import ( + MockConfigEntry, async_capture_events, async_fire_time_changed, async_mock_service, @@ -73,6 +76,57 @@ async def test_google_entity_sync_serialize_with_local_sdk(hass: HomeAssistant) assert "customData" not in serialized +async def test_google_entity_sync_serialize_with_matter( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test sync serialize attributes of a GoogleEntity that is also a Matter device.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + manufacturer="Someone", + model="Some model", + sw_version="Some Version", + identifiers={("matter", "12345678")}, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + entity = entity_registry.async_get_or_create( + "light", + "test", + "1235", + suggested_object_id="ceiling_lights", + device_id=device.id, + ) + hass.states.async_set("light.ceiling_lights", "off") + + entity = helpers.GoogleEntity( + hass, MockConfig(hass=hass), hass.states.get("light.ceiling_lights") + ) + + serialized = entity.sync_serialize(None, "mock-uuid") + assert "matterUniqueId" not in serialized + assert "matterOriginalVendorId" not in serialized + assert "matterOriginalProductId" not in serialized + + hass.config.components.add("matter") + + with patch( + "homeassistant.components.matter.get_matter_device_info", + return_value=MatterDeviceInfo( + unique_id="mock-unique-id", + vendor_id="mock-vendor-id", + product_id="mock-product-id", + ), + ): + serialized = entity.sync_serialize("mock-user-id", "abcdef") + + assert serialized["matterUniqueId"] == "mock-unique-id" + assert serialized["matterOriginalVendorId"] == "mock-vendor-id" + assert serialized["matterOriginalProductId"] == "mock-product-id" + + async def test_config_local_sdk( hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: diff --git 
a/tests/components/google_assistant/test_http.py b/tests/components/google_assistant/test_http.py index 62d2722c445..aa7f8472cab 100644 --- a/tests/components/google_assistant/test_http.py +++ b/tests/components/google_assistant/test_http.py @@ -92,7 +92,7 @@ async def test_update_access_token(hass: HomeAssistant) -> None: ) as mock_get_token, patch( "homeassistant.components.google_assistant.http._get_homegraph_jwt" ) as mock_get_jwt, patch( - "homeassistant.core.dt_util.utcnow" + "homeassistant.core.dt_util.utcnow", ) as mock_utcnow: mock_utcnow.return_value = base_time mock_get_jwt.return_value = jwt diff --git a/tests/components/google_assistant_sdk/test_notify.py b/tests/components/google_assistant_sdk/test_notify.py index f35d19e3805..cf3f90097ce 100644 --- a/tests/components/google_assistant_sdk/test_notify.py +++ b/tests/components/google_assistant_sdk/test_notify.py @@ -66,7 +66,12 @@ async def test_broadcast_no_targets( "Anuncia en el salón Es hora de hacer los deberes", ), ("ko-KR", "숙제할 시간이야", "거실", "숙제할 시간이야 라고 거실에 방송해 줘"), - ("ja-JP", "宿題の時間だよ", "リビング", "宿題の時間だよとリビングにブロードキャストして"), + ( + "ja-JP", + "宿題の時間だよ", + "リビング", + "宿題の時間だよとリビングにブロードキャストして", + ), ], ids=["english", "spanish", "korean", "japanese"], ) diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index 98b59b7697b..7d6eb920593 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -1,11 +1,29 @@ # serializer version: 1 -# name: test_create_todo_list_item[api_responses0] +# name: test_create_todo_list_item[description] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks?alt=json', 'POST', ) # --- -# name: test_create_todo_list_item[api_responses0].1 +# name: test_create_todo_list_item[description].1 + '{"title": "Soda", "status": "needsAction", "notes": "6-pack"}' +# --- +# name: test_create_todo_list_item[due] + tuple( + 
'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks?alt=json', + 'POST', + ) +# --- +# name: test_create_todo_list_item[due].1 + '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00"}' +# --- +# name: test_create_todo_list_item[summary] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks?alt=json', + 'POST', + ) +# --- +# name: test_create_todo_list_item[summary].1 '{"title": "Soda", "status": "needsAction"}' # --- # name: test_delete_todo_list_item[_handler] @@ -14,6 +32,57 @@ 'POST', ) # --- +# name: test_parent_child_ordering[api_responses0] + list([ + dict({ + 'status': 'needs_action', + 'summary': 'Task 1', + 'uid': 'task-1', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Task 2', + 'uid': 'task-2', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Task 3 (Parent)', + 'uid': 'task-3', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Task 4', + 'uid': 'task-4', + }), + ]) +# --- +# name: test_partial_update[description] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_partial_update[description].1 + '{"notes": "6-pack"}' +# --- +# name: test_partial_update[due_date] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_partial_update[due_date].1 + '{"due": "2023-11-18T00:00:00-08:00"}' +# --- +# name: test_partial_update[rename] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_partial_update[rename].1 + '{"title": "Soda"}' +# --- # name: test_partial_update_status[api_responses0] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', @@ -23,15 +92,6 @@ # name: test_partial_update_status[api_responses0].1 '{"status": "needsAction"}' # --- -# name: 
test_partial_update_title[api_responses0] - tuple( - 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', - 'PATCH', - ) -# --- -# name: test_partial_update_title[api_responses0].1 - '{"title": "Soda"}' -# --- # name: test_update_todo_list_item[api_responses0] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index 7b11372f1d4..3329f89c1ca 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -19,13 +19,12 @@ from homeassistant.exceptions import HomeAssistantError from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" +ITEM = { + "id": "task-list-id-1", + "title": "My tasks", +} LIST_TASK_LIST_RESPONSE = { - "items": [ - { - "id": "task-list-id-1", - "title": "My tasks", - }, - ] + "items": [ITEM], } EMPTY_RESPONSE = {} LIST_TASKS_RESPONSE = { @@ -45,17 +44,51 @@ BOUNDARY = "batch_00972cc8-75bd-11ee-9692-0242ac110002" # Arbitrary uuid LIST_TASKS_RESPONSE_WATER = { "items": [ - {"id": "some-task-id", "title": "Water", "status": "needsAction"}, + { + "id": "some-task-id", + "title": "Water", + "status": "needsAction", + "position": "00000000000000000001", + }, ], } LIST_TASKS_RESPONSE_MULTIPLE = { "items": [ - {"id": "some-task-id-1", "title": "Water", "status": "needsAction"}, - {"id": "some-task-id-2", "title": "Milk", "status": "needsAction"}, - {"id": "some-task-id-3", "title": "Cheese", "status": "needsAction"}, + { + "id": "some-task-id-2", + "title": "Milk", + "status": "needsAction", + "position": "00000000000000000002", + }, + { + "id": "some-task-id-1", + "title": "Water", + "status": "needsAction", + "position": "00000000000000000001", + }, + { + "id": "some-task-id-3", + "title": "Cheese", + "status": "needsAction", + "position": "00000000000000000003", + }, ], } +# API responses when testing update 
methods +UPDATE_API_RESPONSES = [ + LIST_TASK_LIST_RESPONSE, + LIST_TASKS_RESPONSE_WATER, + EMPTY_RESPONSE, # update + LIST_TASKS_RESPONSE, # refresh after update +] +CREATE_API_RESPONSES = [ + LIST_TASK_LIST_RESPONSE, + LIST_TASKS_RESPONSE, + EMPTY_RESPONSE, # create + LIST_TASKS_RESPONSE, # refresh +] + @pytest.fixture def platforms() -> list[str]: @@ -63,39 +96,22 @@ def platforms() -> list[str]: return [Platform.TODO] -@pytest.fixture -def ws_req_id() -> Callable[[], int]: - """Fixture for incremental websocket requests.""" - - id = 0 - - def next_id() -> int: - nonlocal id - id += 1 - return id - - return next_id - - @pytest.fixture async def ws_get_items( - hass_ws_client: WebSocketGenerator, ws_req_id: Callable[[], int] + hass_ws_client: WebSocketGenerator, ) -> Callable[[], Awaitable[dict[str, str]]]: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: # Fetch items using To-do platform client = await hass_ws_client() - id = ws_req_id() - await client.send_json( + await client.send_json_auto_id( { - "id": id, "type": "todo/item/list", "entity_id": ENTITY_ID, } ) resp = await client.receive_json() - assert resp.get("id") == id assert resp.get("success") return resp.get("result", {}).get("items", []) @@ -199,8 +215,20 @@ def mock_http_response(response_handler: list | Callable) -> Mock: LIST_TASK_LIST_RESPONSE, { "items": [ - {"id": "task-1", "title": "Task 1", "status": "needsAction"}, - {"id": "task-2", "title": "Task 2", "status": "completed"}, + { + "id": "task-1", + "title": "Task 1", + "status": "needsAction", + "position": "0000000000000001", + "due": "2023-11-18T00:00:00+00:00", + }, + { + "id": "task-2", + "title": "Task 2", + "status": "completed", + "position": "0000000000000002", + "notes": "long description", + }, ], }, ] @@ -225,11 +253,13 @@ async def test_get_items( "uid": "task-1", "summary": "Task 1", "status": "needs_action", + "due": "2023-11-18", }, { "uid": "task-2", "summary": "Task 2", 
"status": "completed", + "description": "long description", }, ] @@ -320,21 +350,20 @@ async def test_task_items_error_response( @pytest.mark.parametrize( - "api_responses", + ("api_responses", "item_data"), [ - [ - LIST_TASK_LIST_RESPONSE, - LIST_TASKS_RESPONSE, - EMPTY_RESPONSE, # create - LIST_TASKS_RESPONSE, # refresh after delete - ] + (CREATE_API_RESPONSES, {}), + (CREATE_API_RESPONSES, {"due_date": "2023-11-18"}), + (CREATE_API_RESPONSES, {"description": "6-pack"}), ], + ids=["summary", "due", "description"], ) async def test_create_todo_list_item( hass: HomeAssistant, setup_credentials: None, integration_setup: Callable[[], Awaitable[bool]], mock_http_response: Mock, + item_data: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: """Test for creating a To-do Item.""" @@ -348,7 +377,7 @@ async def test_create_todo_list_item( await hass.services.async_call( TODO_DOMAIN, "add_item", - {"item": "Soda"}, + {"item": "Soda", **item_data}, target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -394,17 +423,7 @@ async def test_create_todo_list_item_error( ) -@pytest.mark.parametrize( - "api_responses", - [ - [ - LIST_TASK_LIST_RESPONSE, - LIST_TASKS_RESPONSE_WATER, - EMPTY_RESPONSE, # update - LIST_TASKS_RESPONSE, # refresh after update - ] - ], -) +@pytest.mark.parametrize("api_responses", [UPDATE_API_RESPONSES]) async def test_update_todo_list_item( hass: HomeAssistant, setup_credentials: None, @@ -470,21 +489,20 @@ async def test_update_todo_list_item_error( @pytest.mark.parametrize( - "api_responses", + ("api_responses", "item_data"), [ - [ - LIST_TASK_LIST_RESPONSE, - LIST_TASKS_RESPONSE_WATER, - EMPTY_RESPONSE, # update - LIST_TASKS_RESPONSE, # refresh after update - ] + (UPDATE_API_RESPONSES, {"rename": "Soda"}), + (UPDATE_API_RESPONSES, {"due_date": "2023-11-18"}), + (UPDATE_API_RESPONSES, {"description": "6-pack"}), ], + ids=("rename", "due_date", "description"), ) -async def test_partial_update_title( +async def test_partial_update( hass: 
HomeAssistant, setup_credentials: None, integration_setup: Callable[[], Awaitable[bool]], mock_http_response: Any, + item_data: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: """Test for partial update with title only.""" @@ -498,7 +516,7 @@ async def test_partial_update_title( await hass.services.async_call( TODO_DOMAIN, "update_item", - {"item": "some-task-id", "rename": "Soda"}, + {"item": "some-task-id", **item_data}, target={"entity_id": "todo.my_tasks"}, blocking=True, ) @@ -509,17 +527,7 @@ async def test_partial_update_title( assert call.kwargs.get("body") == snapshot -@pytest.mark.parametrize( - "api_responses", - [ - [ - LIST_TASK_LIST_RESPONSE, - LIST_TASKS_RESPONSE_WATER, - EMPTY_RESPONSE, # update - LIST_TASKS_RESPONSE, # refresh after update - ] - ], -) +@pytest.mark.parametrize("api_responses", [UPDATE_API_RESPONSES]) async def test_partial_update_status( hass: HomeAssistant, setup_credentials: None, @@ -558,7 +566,7 @@ async def test_partial_update_status( LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_MULTIPLE, [EMPTY_RESPONSE, EMPTY_RESPONSE, EMPTY_RESPONSE], # Delete batch - LIST_TASKS_RESPONSE, # refresh after create + LIST_TASKS_RESPONSE, # refresh after delete ] ) ) @@ -714,3 +722,148 @@ async def test_delete_server_error( target={"entity_id": "todo.my_tasks"}, blocking=True, ) + + +@pytest.mark.parametrize( + "api_responses", + [ + [ + LIST_TASK_LIST_RESPONSE, + { + "items": [ + { + "id": "task-3-2", + "title": "Child 2", + "status": "needsAction", + "parent": "task-3", + "position": "0000000000000002", + }, + { + "id": "task-3", + "title": "Task 3 (Parent)", + "status": "needsAction", + "position": "0000000000000003", + }, + { + "id": "task-2", + "title": "Task 2", + "status": "needsAction", + "position": "0000000000000002", + }, + { + "id": "task-1", + "title": "Task 1", + "status": "needsAction", + "position": "0000000000000001", + }, + { + "id": "task-3-1", + "title": "Child 1", + "status": "needsAction", + "parent": "task-3", + 
"position": "0000000000000001", + }, + { + "id": "task-4", + "title": "Task 4", + "status": "needsAction", + "position": "0000000000000004", + }, + ], + }, + ] + ], +) +async def test_parent_child_ordering( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + ws_get_items: Callable[[], Awaitable[dict[str, str]]], + snapshot: SnapshotAssertion, +) -> None: + """Test getting todo list items.""" + + assert await integration_setup() + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "4" + + items = await ws_get_items() + assert items == snapshot + + +@pytest.mark.parametrize( + "api_responses", + [ + [ + LIST_TASK_LIST_RESPONSE, + LIST_TASKS_RESPONSE_WATER, + EMPTY_RESPONSE, # update + # refresh after update + { + "items": [ + { + "id": "some-task-id", + "title": "Milk", + "status": "needsAction", + "position": "0000000000000001", + }, + ], + }, + ] + ], +) +async def test_susbcribe( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + hass_ws_client: WebSocketGenerator, +) -> None: + """Test subscribing to item updates.""" + + assert await integration_setup() + + # Subscribe and get the initial list + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": "todo.my_tasks", + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Water" + assert items[0]["status"] == "needs_action" + uid = items[0]["uid"] + assert uid + + # Rename item + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": uid, "rename": "Milk"}, + target={"entity_id": "todo.my_tasks"}, + 
blocking=True, + ) + + # Verify update is published + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Milk" + assert items[0]["status"] == "needs_action" + assert "uid" in items[0] diff --git a/tests/components/group/test_config_flow.py b/tests/components/group/test_config_flow.py index 3189e344c62..7b83ed9eb0d 100644 --- a/tests/components/group/test_config_flow.py +++ b/tests/components/group/test_config_flow.py @@ -699,4 +699,4 @@ async def test_option_flow_sensor_preview_config_entry_removed( ) msg = await client.receive_json() assert not msg["success"] - assert msg["error"] == {"code": "unknown_error", "message": "Unknown error"} + assert msg["error"] == {"code": "home_assistant_error", "message": "Unknown error"} diff --git a/tests/components/guardian/conftest.py b/tests/components/guardian/conftest.py index acf59aeea86..f2cde0a553d 100644 --- a/tests/components/guardian/conftest.py +++ b/tests/components/guardian/conftest.py @@ -131,9 +131,10 @@ async def setup_guardian_fixture( "aioguardian.commands.wifi.WiFiCommands.status", return_value=data_wifi_status, ), patch( - "aioguardian.client.Client.disconnect" + "aioguardian.client.Client.disconnect", ), patch( - "homeassistant.components.guardian.PLATFORMS", [] + "homeassistant.components.guardian.PLATFORMS", + [], ): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/harmony/test_switch.py b/tests/components/harmony/test_switch.py index 58cbd3eac56..59e5a7c7fc8 100644 --- a/tests/components/harmony/test_switch.py +++ b/tests/components/harmony/test_switch.py @@ -1,7 +1,10 @@ """Test the Logitech Harmony Hub activity switches.""" from datetime import timedelta +from homeassistant.components import automation, script +from homeassistant.components.automation import 
automations_with_entity from homeassistant.components.harmony.const import DOMAIN +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -17,6 +20,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component from homeassistant.util import utcnow from .const import ENTITY_PLAY_MUSIC, ENTITY_REMOTE, ENTITY_WATCH_TV, HUB_NAME @@ -133,3 +138,62 @@ async def _toggle_switch_and_wait(hass, service_name, entity): blocking=True, ) await hass.async_block_till_done() + + +async def test_create_issue( + harmony_client, + mock_hc, + hass: HomeAssistant, + mock_write_config, + entity_registry_enabled_by_default: None, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": ENTITY_WATCH_TV}, + "action": {"service": "switch.turn_on", "entity_id": ENTITY_WATCH_TV}, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "service": "switch.turn_on", + "data": {"entity_id": ENTITY_WATCH_TV}, + }, + ], + } + } + }, + ) + + entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "192.0.2.0", CONF_NAME: HUB_NAME} + ) + + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert automations_with_entity(hass, ENTITY_WATCH_TV)[0] == "automation.test" + assert scripts_with_entity(hass, ENTITY_WATCH_TV)[0] == "script.test" + issue_registry: ir.IssueRegistry = ir.async_get(hass) + + assert issue_registry.async_get_issue(DOMAIN, "deprecated_switches") + 
assert issue_registry.async_get_issue( + DOMAIN, "deprecated_switches_switch.guest_room_watch_tv_automation.test" + ) + assert issue_registry.async_get_issue( + DOMAIN, "deprecated_switches_switch.guest_room_watch_tv_script.test" + ) + + assert len(issue_registry.issues) == 3 diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index 22051808ccc..0cce33f6dfd 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -54,9 +54,9 @@ def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock): "homeassistant.components.hassio.HassIO.get_ingress_panels", return_value={"panels": []}, ), patch( - "homeassistant.components.hassio.issues.SupervisorIssues.setup" + "homeassistant.components.hassio.issues.SupervisorIssues.setup", ), patch( - "homeassistant.components.hassio.HassIO.refresh_updates" + "homeassistant.components.hassio.HassIO.refresh_updates", ): hass.state = CoreState.starting hass.loop.run_until_complete(async_setup_component(hass, "hassio", {})) diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index 5c4717fd561..0923967a480 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -12,12 +12,7 @@ from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_S from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import ( - MockModule, - mock_config_flow, - mock_entity_platform, - mock_integration, -) +from tests.common import MockModule, mock_config_flow, mock_integration, mock_platform from tests.test_util.aiohttp import AiohttpClientMocker @@ -25,7 +20,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def mock_mqtt_fixture(hass): """Mock the MQTT integration's config flow.""" mock_integration(hass, MockModule(MQTT_DOMAIN)) - mock_entity_platform(hass, f"config_flow.{MQTT_DOMAIN}", None) + 
mock_platform(hass, f"{MQTT_DOMAIN}.config_flow", None) class MqttFlow(config_entries.ConfigFlow): """Test flow.""" diff --git a/tests/components/homeassistant/test_scene.py b/tests/components/homeassistant/test_scene.py index 085ed4f0641..d754c67ad49 100644 --- a/tests/components/homeassistant/test_scene.py +++ b/tests/components/homeassistant/test_scene.py @@ -8,6 +8,7 @@ from homeassistant.components.homeassistant import scene as ha_scene from homeassistant.components.homeassistant.scene import EVENT_SCENE_RELOADED from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component from tests.common import async_capture_events, async_mock_service @@ -164,6 +165,65 @@ async def test_create_service( assert scene.attributes.get("entity_id") == ["light.kitchen"] +async def test_delete_service( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the delete service.""" + assert await async_setup_component( + hass, + "scene", + {"scene": {"name": "hallo_2", "entities": {"light.kitchen": "on"}}}, + ) + + await hass.services.async_call( + "scene", + "create", + { + "scene_id": "hallo", + "entities": {"light.bed_light": {"state": "on", "brightness": 50}}, + }, + blocking=True, + ) + await hass.async_block_till_done() + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + "scene", + "delete", + { + "entity_id": "scene.hallo_3", + }, + blocking=True, + ) + await hass.async_block_till_done() + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + "scene", + "delete", + { + "entity_id": "scene.hallo_2", + }, + blocking=True, + ) + await hass.async_block_till_done() + assert hass.states.get("scene.hallo_2") is not None + + assert hass.states.get("scene.hallo") is not None + + await hass.services.async_call( + "scene", + "delete", + { + "entity_id": 
"scene.hallo", + }, + blocking=True, + ) + await hass.async_block_till_done() + + assert hass.states.get("state.hallo") is None + + async def test_snapshot_service( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index fbc77cdee9e..f58d561bfb3 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -293,7 +293,14 @@ async def test_option_flow_install_multi_pan_addon_zha( config_entry.add_to_hass(hass) zha_config_entry = MockConfigEntry( - data={"device": {"path": "/dev/ttyTEST123"}, "radio_type": "ezsp"}, + data={ + "device": { + "path": "/dev/ttyTEST123", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + }, domain=ZHA_DOMAIN, options={}, title="Test", @@ -348,8 +355,8 @@ async def test_option_flow_install_multi_pan_addon_zha( assert zha_config_entry.data == { "device": { "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 57600, # ZHA default - "flow_control": "software", # ZHA default + "baudrate": 115200, + "flow_control": None, }, "radio_type": "ezsp", } diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index 4d43d29463a..65636b27a16 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -337,8 +337,8 @@ async def test_option_flow_install_multi_pan_addon_zha( assert zha_config_entry.data == { "device": { "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 57600, # ZHA default - "flow_control": "software", # ZHA default + "baudrate": 115200, + "flow_control": None, }, "radio_type": "ezsp", } diff --git 
a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index e00603dc8f7..11961c09a2d 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -147,7 +147,7 @@ async def test_setup_zha( assert config_entry.data == { "device": { "baudrate": 115200, - "flow_control": "software", + "flow_control": None, "path": CONFIG_ENTRY_DATA["device"], }, "radio_type": "ezsp", @@ -200,8 +200,8 @@ async def test_setup_zha_multipan( config_entry = hass.config_entries.async_entries("zha")[0] assert config_entry.data == { "device": { - "baudrate": 57600, # ZHA default - "flow_control": "software", # ZHA default + "baudrate": 115200, + "flow_control": None, "path": "socket://core-silabs-multiprotocol:9999", }, "radio_type": "ezsp", @@ -255,7 +255,7 @@ async def test_setup_zha_multipan_other_device( assert config_entry.data == { "device": { "baudrate": 115200, - "flow_control": "software", + "flow_control": None, "path": CONFIG_ENTRY_DATA["device"], }, "radio_type": "ezsp", diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 58d47c41987..242b316de66 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -249,8 +249,8 @@ async def test_option_flow_install_multi_pan_addon_zha( assert zha_config_entry.data == { "device": { "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 57600, # ZHA default - "flow_control": "software", # ZHA default + "baudrate": 115200, + "flow_control": None, }, "radio_type": "ezsp", } diff --git a/tests/components/homeassistant_yellow/test_init.py b/tests/components/homeassistant_yellow/test_init.py index addc519c865..f8cdcd8a13b 100644 --- a/tests/components/homeassistant_yellow/test_init.py +++ b/tests/components/homeassistant_yellow/test_init.py @@ 
-145,8 +145,8 @@ async def test_setup_zha_multipan( config_entry = hass.config_entries.async_entries("zha")[0] assert config_entry.data == { "device": { - "baudrate": 57600, # ZHA default - "flow_control": "software", # ZHA default + "baudrate": 115200, + "flow_control": None, "path": "socket://core-silabs-multiprotocol:9999", }, "radio_type": "ezsp", diff --git a/tests/components/homekit/conftest.py b/tests/components/homekit/conftest.py index fe151c902cb..8c6d4328065 100644 --- a/tests/components/homekit/conftest.py +++ b/tests/components/homekit/conftest.py @@ -31,7 +31,7 @@ def run_driver(hass, event_loop, iid_storage): ), patch("pyhap.accessory_driver.HAPServer"), patch( "pyhap.accessory_driver.AccessoryDriver.publish" ), patch( - "pyhap.accessory_driver.AccessoryDriver.persist" + "pyhap.accessory_driver.AccessoryDriver.persist", ): yield HomeDriver( hass, @@ -53,9 +53,9 @@ def hk_driver(hass, event_loop, iid_storage): ), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch( "pyhap.accessory_driver.HAPServer.async_start" ), patch( - "pyhap.accessory_driver.AccessoryDriver.publish" + "pyhap.accessory_driver.AccessoryDriver.publish", ), patch( - "pyhap.accessory_driver.AccessoryDriver.persist" + "pyhap.accessory_driver.AccessoryDriver.persist", ): yield HomeDriver( hass, @@ -77,13 +77,13 @@ def mock_hap(hass, event_loop, iid_storage, mock_zeroconf): ), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch( "pyhap.accessory_driver.HAPServer.async_start" ), patch( - "pyhap.accessory_driver.AccessoryDriver.publish" + "pyhap.accessory_driver.AccessoryDriver.publish", ), patch( - "pyhap.accessory_driver.AccessoryDriver.async_start" + "pyhap.accessory_driver.AccessoryDriver.async_start", ), patch( - "pyhap.accessory_driver.AccessoryDriver.async_stop" + "pyhap.accessory_driver.AccessoryDriver.async_stop", ), patch( - "pyhap.accessory_driver.AccessoryDriver.persist" + "pyhap.accessory_driver.AccessoryDriver.persist", ): yield HomeDriver( hass, diff --git 
a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 158efa477d4..1d42325d54c 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -1202,9 +1202,7 @@ async def test_homekit_reset_accessories_not_supported( "pyhap.accessory_driver.AccessoryDriver.async_update_advertisement" ) as hk_driver_async_update_advertisement, patch( "pyhap.accessory_driver.AccessoryDriver.async_start" - ), patch.object( - homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0 - ): + ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0): await async_init_entry(hass, entry) acc_mock = MagicMock() @@ -1247,9 +1245,7 @@ async def test_homekit_reset_accessories_state_missing( "pyhap.accessory_driver.AccessoryDriver.config_changed" ) as hk_driver_config_changed, patch( "pyhap.accessory_driver.AccessoryDriver.async_start" - ), patch.object( - homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0 - ): + ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0): await async_init_entry(hass, entry) acc_mock = MagicMock() @@ -1291,9 +1287,7 @@ async def test_homekit_reset_accessories_not_bridged( "pyhap.accessory_driver.AccessoryDriver.async_update_advertisement" ) as hk_driver_async_update_advertisement, patch( "pyhap.accessory_driver.AccessoryDriver.async_start" - ), patch.object( - homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0 - ): + ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0): await async_init_entry(hass, entry) assert hk_driver_async_update_advertisement.call_count == 0 @@ -1338,7 +1332,7 @@ async def test_homekit_reset_single_accessory( ) as hk_driver_async_update_advertisement, patch( "pyhap.accessory_driver.AccessoryDriver.async_start" ), patch( - f"{PATH_HOMEKIT}.accessories.HomeAccessory.run" + f"{PATH_HOMEKIT}.accessories.HomeAccessory.run", ) as mock_run: await async_init_entry(hass, entry) homekit.status = STATUS_RUNNING @@ -2071,9 +2065,9 @@ async def test_reload(hass: 
HomeAssistant, mock_async_zeroconf: None) -> None: ) as mock_homekit2, patch.object(homekit.bridge, "add_accessory"), patch( f"{PATH_HOMEKIT}.async_show_setup_message" ), patch( - f"{PATH_HOMEKIT}.get_accessory" + f"{PATH_HOMEKIT}.get_accessory", ), patch( - "pyhap.accessory_driver.AccessoryDriver.async_start" + "pyhap.accessory_driver.AccessoryDriver.async_start", ), patch( "homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4" ): diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 909e94a0d84..b1f063615f3 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -102,7 +102,7 @@ async def test_hmip_add_device( ), patch.object(reloaded_hap, "async_connect"), patch.object( reloaded_hap, "get_hap", return_value=mock_hap.home ), patch( - "homeassistant.components.homematicip_cloud.hap.asyncio.sleep" + "homeassistant.components.homematicip_cloud.hap.asyncio.sleep", ): mock_hap.home.fire_create_event(event_type=EventType.DEVICE_ADDED) await hass.async_block_till_done() diff --git a/tests/components/homematicip_cloud/test_hap.py b/tests/components/homematicip_cloud/test_hap.py index 4569a6fff6b..0d950968191 100644 --- a/tests/components/homematicip_cloud/test_hap.py +++ b/tests/components/homematicip_cloud/test_hap.py @@ -53,7 +53,8 @@ async def test_auth_auth_check_and_register(hass: HomeAssistant) -> None: ), patch.object( hmip_auth.auth, "requestAuthToken", return_value="ABC" ), patch.object( - hmip_auth.auth, "confirmAuthToken" + hmip_auth.auth, + "confirmAuthToken", ): assert await hmip_auth.async_checkbutton() assert await hmip_auth.async_register() == "ABC" diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index e778c82928b..0c24d9daebe 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -1,6 +1,5 @@ """Fixtures for 
HomeWizard integration tests.""" from collections.abc import Generator -import json from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors import NotFoundError @@ -11,7 +10,7 @@ from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry, get_fixture_path, load_fixture +from tests.common import MockConfigEntry, get_fixture_path, load_json_object_fixture @pytest.fixture @@ -35,22 +34,22 @@ def mock_homewizardenergy( client = homewizard.return_value client.device.return_value = Device.from_dict( - json.loads(load_fixture(f"{device_fixture}/device.json", DOMAIN)) + load_json_object_fixture(f"{device_fixture}/device.json", DOMAIN) ) client.data.return_value = Data.from_dict( - json.loads(load_fixture(f"{device_fixture}/data.json", DOMAIN)) + load_json_object_fixture(f"{device_fixture}/data.json", DOMAIN) ) if get_fixture_path(f"{device_fixture}/state.json", DOMAIN).exists(): client.state.return_value = State.from_dict( - json.loads(load_fixture(f"{device_fixture}/state.json", DOMAIN)) + load_json_object_fixture(f"{device_fixture}/state.json", DOMAIN) ) else: client.state.side_effect = NotFoundError if get_fixture_path(f"{device_fixture}/system.json", DOMAIN).exists(): client.system.return_value = System.from_dict( - json.loads(load_fixture(f"{device_fixture}/system.json", DOMAIN)) + load_json_object_fixture(f"{device_fixture}/system.json", DOMAIN) ) else: client.system.side_effect = NotFoundError diff --git a/tests/components/homewizard/fixtures/HWE-P1-zero-values/data.json b/tests/components/homewizard/fixtures/HWE-P1-zero-values/data.json new file mode 100644 index 00000000000..d21b4ed2d4a --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-zero-values/data.json @@ -0,0 +1,45 @@ +{ + "wifi_ssid": "My Wi-Fi", + "wifi_strength": 100, + "smr_version": 50, + "meter_model": "ISKRA 2M550T-101", + 
"unique_id": "00112233445566778899AABBCCDDEEFF", + "active_tariff": 2, + "total_power_import_kwh": 0.0, + "total_power_import_t1_kwh": 0.0, + "total_power_import_t2_kwh": 0.0, + "total_power_import_t3_kwh": 0.0, + "total_power_import_t4_kwh": 0.0, + "total_power_export_kwh": 0.0, + "total_power_export_t1_kwh": 0.0, + "total_power_export_t2_kwh": 0.0, + "total_power_export_t3_kwh": 0.0, + "total_power_export_t4_kwh": 0.0, + "active_power_w": 0.0, + "active_power_l1_w": 0.0, + "active_power_l2_w": 0.0, + "active_power_l3_w": 0.0, + "active_voltage_l1_v": 0.0, + "active_voltage_l2_v": 0.0, + "active_voltage_l3_v": 0.0, + "active_current_l1_a": 0, + "active_current_l2_a": 0, + "active_current_l3_a": 0, + "active_frequency_hz": 0, + "voltage_sag_l1_count": 0, + "voltage_sag_l2_count": 0, + "voltage_sag_l3_count": 0, + "voltage_swell_l1_count": 0, + "voltage_swell_l2_count": 0, + "voltage_swell_l3_count": 0, + "any_power_fail_count": 0, + "long_power_fail_count": 0, + "total_gas_m3": 0.0, + "gas_timestamp": 210314112233, + "gas_unique_id": "01FFEEDDCCBBAA99887766554433221100", + "active_power_average_w": 0, + "montly_power_peak_w": 0.0, + "montly_power_peak_timestamp": 230101080010, + "active_liter_lpm": 0.0, + "total_liter_m3": 0.0 +} diff --git a/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json new file mode 100644 index 00000000000..4972c491859 --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json @@ -0,0 +1,7 @@ +{ + "product_type": "HWE-P1", + "product_name": "P1 meter", + "serial": "3c39e7aabbcc", + "firmware_version": "4.19", + "api_version": "v1" +} diff --git a/tests/components/homewizard/fixtures/HWE-P1-zero-values/system.json b/tests/components/homewizard/fixtures/HWE-P1-zero-values/system.json new file mode 100644 index 00000000000..362491b3519 --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-zero-values/system.json @@ -0,0 
+1,3 @@ +{ + "cloud_enabled": true +} diff --git a/tests/components/homewizard/fixtures/HWE-SKT/data.json b/tests/components/homewizard/fixtures/HWE-SKT/data.json index 7e647952982..f2a465bd40d 100644 --- a/tests/components/homewizard/fixtures/HWE-SKT/data.json +++ b/tests/components/homewizard/fixtures/HWE-SKT/data.json @@ -1,46 +1,8 @@ { "wifi_ssid": "My Wi-Fi", "wifi_strength": 94, - "smr_version": null, - "meter_model": null, - "unique_meter_id": null, - "active_tariff": null, - "total_power_import_kwh": null, "total_power_import_t1_kwh": 63.651, - "total_power_import_t2_kwh": null, - "total_power_import_t3_kwh": null, - "total_power_import_t4_kwh": null, - "total_power_export_kwh": null, "total_power_export_t1_kwh": 0, - "total_power_export_t2_kwh": null, - "total_power_export_t3_kwh": null, - "total_power_export_t4_kwh": null, "active_power_w": 1457.277, - "active_power_l1_w": 1457.277, - "active_power_l2_w": null, - "active_power_l3_w": null, - "active_voltage_l1_v": null, - "active_voltage_l2_v": null, - "active_voltage_l3_v": null, - "active_current_l1_a": null, - "active_current_l2_a": null, - "active_current_l3_a": null, - "active_frequency_hz": null, - "voltage_sag_l1_count": null, - "voltage_sag_l2_count": null, - "voltage_sag_l3_count": null, - "voltage_swell_l1_count": null, - "voltage_swell_l2_count": null, - "voltage_swell_l3_count": null, - "any_power_fail_count": null, - "long_power_fail_count": null, - "active_power_average_w": null, - "monthly_power_peak_w": null, - "monthly_power_peak_timestamp": null, - "total_gas_m3": null, - "gas_timestamp": null, - "gas_unique_id": null, - "active_liter_lpm": null, - "total_liter_m3": null, - "external_devices": null + "active_power_l1_w": 1457.277 } diff --git a/tests/components/homewizard/fixtures/HWE-WTR/data.json b/tests/components/homewizard/fixtures/HWE-WTR/data.json index 169528abef4..16097742891 100644 --- a/tests/components/homewizard/fixtures/HWE-WTR/data.json +++ 
b/tests/components/homewizard/fixtures/HWE-WTR/data.json @@ -1,46 +1,6 @@ { "wifi_ssid": "My Wi-Fi", "wifi_strength": 84, - "smr_version": null, - "meter_model": null, - "unique_meter_id": null, - "active_tariff": null, - "total_power_import_kwh": null, - "total_power_import_t1_kwh": null, - "total_power_import_t2_kwh": null, - "total_power_import_t3_kwh": null, - "total_power_import_t4_kwh": null, - "total_power_export_kwh": null, - "total_power_export_t1_kwh": null, - "total_power_export_t2_kwh": null, - "total_power_export_t3_kwh": null, - "total_power_export_t4_kwh": null, - "active_power_w": null, - "active_power_l1_w": null, - "active_power_l2_w": null, - "active_power_l3_w": null, - "active_voltage_l1_v": null, - "active_voltage_l2_v": null, - "active_voltage_l3_v": null, - "active_current_l1_a": null, - "active_current_l2_a": null, - "active_current_l3_a": null, - "active_frequency_hz": null, - "voltage_sag_l1_count": null, - "voltage_sag_l2_count": null, - "voltage_sag_l3_count": null, - "voltage_swell_l1_count": null, - "voltage_swell_l2_count": null, - "voltage_swell_l3_count": null, - "any_power_fail_count": null, - "long_power_fail_count": null, - "active_power_average_w": null, - "monthly_power_peak_w": null, - "monthly_power_peak_timestamp": null, - "total_gas_m3": null, - "gas_timestamp": null, - "gas_unique_id": null, "active_liter_lpm": 0, - "total_liter_m3": 17.014, - "external_devices": null + "total_liter_m3": 17.014 } diff --git a/tests/components/homewizard/fixtures/SDM230/data.json b/tests/components/homewizard/fixtures/SDM230/data.json index e4eb045dff2..64fb2533359 100644 --- a/tests/components/homewizard/fixtures/SDM230/data.json +++ b/tests/components/homewizard/fixtures/SDM230/data.json @@ -1,46 +1,8 @@ { "wifi_ssid": "My Wi-Fi", "wifi_strength": 92, - "smr_version": null, - "meter_model": null, - "unique_meter_id": null, - "active_tariff": null, - "total_power_import_kwh": 2.705, "total_power_import_t1_kwh": 2.705, - 
"total_power_import_t2_kwh": null, - "total_power_import_t3_kwh": null, - "total_power_import_t4_kwh": null, - "total_power_export_kwh": 255.551, "total_power_export_t1_kwh": 255.551, - "total_power_export_t2_kwh": null, - "total_power_export_t3_kwh": null, - "total_power_export_t4_kwh": null, "active_power_w": -1058.296, - "active_power_l1_w": -1058.296, - "active_power_l2_w": null, - "active_power_l3_w": null, - "active_voltage_l1_v": null, - "active_voltage_l2_v": null, - "active_voltage_l3_v": null, - "active_current_l1_a": null, - "active_current_l2_a": null, - "active_current_l3_a": null, - "active_frequency_hz": null, - "voltage_sag_l1_count": null, - "voltage_sag_l2_count": null, - "voltage_sag_l3_count": null, - "voltage_swell_l1_count": null, - "voltage_swell_l2_count": null, - "voltage_swell_l3_count": null, - "any_power_fail_count": null, - "long_power_fail_count": null, - "active_power_average_w": null, - "monthly_power_peak_w": null, - "monthly_power_peak_timestamp": null, - "total_gas_m3": null, - "gas_timestamp": null, - "gas_unique_id": null, - "active_liter_lpm": null, - "total_liter_m3": null, - "external_devices": null + "active_power_l1_w": -1058.296 } diff --git a/tests/components/homewizard/fixtures/SDM630/data.json b/tests/components/homewizard/fixtures/SDM630/data.json new file mode 100644 index 00000000000..ee143220c67 --- /dev/null +++ b/tests/components/homewizard/fixtures/SDM630/data.json @@ -0,0 +1,10 @@ +{ + "wifi_ssid": "My Wi-Fi", + "wifi_strength": 92, + "total_power_import_t1_kwh": 0.101, + "total_power_export_t1_kwh": 0.523, + "active_power_w": -900.194, + "active_power_l1_w": -1058.296, + "active_power_l2_w": 158.102, + "active_power_l3_w": 0.0 +} diff --git a/tests/components/homewizard/fixtures/SDM630/device.json b/tests/components/homewizard/fixtures/SDM630/device.json new file mode 100644 index 00000000000..b8ec1d18fe8 --- /dev/null +++ b/tests/components/homewizard/fixtures/SDM630/device.json @@ -0,0 +1,7 @@ +{ + 
"product_type": "SDM630-wifi", + "product_name": "KWh meter 3-phase", + "serial": "3c39e7aabbcc", + "firmware_version": "3.06", + "api_version": "v1" +} diff --git a/tests/components/homewizard/fixtures/SDM630/system.json b/tests/components/homewizard/fixtures/SDM630/system.json new file mode 100644 index 00000000000..362491b3519 --- /dev/null +++ b/tests/components/homewizard/fixtures/SDM630/system.json @@ -0,0 +1,3 @@ +{ + "cloud_enabled": true +} diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index a5c3e6ed8ba..01094ec2698 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -95,12 +95,12 @@ 'monthly_power_peak_timestamp': None, 'monthly_power_peak_w': None, 'smr_version': None, - 'total_energy_export_kwh': None, + 'total_energy_export_kwh': 0, 'total_energy_export_t1_kwh': 0, 'total_energy_export_t2_kwh': None, 'total_energy_export_t3_kwh': None, 'total_energy_export_t4_kwh': None, - 'total_energy_import_kwh': None, + 'total_energy_import_kwh': 63.651, 'total_energy_import_t1_kwh': 63.651, 'total_energy_import_t2_kwh': None, 'total_energy_import_t3_kwh': None, @@ -265,7 +265,78 @@ 'serial': '**REDACTED**', }), 'state': None, - 'system': None, + 'system': dict({ + 'cloud_enabled': True, + }), + }), + 'entry': dict({ + 'ip_address': '**REDACTED**', + 'product_name': 'Product name', + 'product_type': 'product_type', + 'serial': '**REDACTED**', + }), + }) +# --- +# name: test_diagnostics[SDM630] + dict({ + 'data': dict({ + 'data': dict({ + 'active_current_l1_a': None, + 'active_current_l2_a': None, + 'active_current_l3_a': None, + 'active_frequency_hz': None, + 'active_liter_lpm': None, + 'active_power_average_w': None, + 'active_power_l1_w': -1058.296, + 'active_power_l2_w': 158.102, + 'active_power_l3_w': 0.0, + 'active_power_w': -900.194, + 'active_tariff': None, + 'active_voltage_l1_v': 
None, + 'active_voltage_l2_v': None, + 'active_voltage_l3_v': None, + 'any_power_fail_count': None, + 'external_devices': None, + 'gas_timestamp': None, + 'gas_unique_id': None, + 'long_power_fail_count': None, + 'meter_model': None, + 'monthly_power_peak_timestamp': None, + 'monthly_power_peak_w': None, + 'smr_version': None, + 'total_energy_export_kwh': 0.523, + 'total_energy_export_t1_kwh': 0.523, + 'total_energy_export_t2_kwh': None, + 'total_energy_export_t3_kwh': None, + 'total_energy_export_t4_kwh': None, + 'total_energy_import_kwh': 0.101, + 'total_energy_import_t1_kwh': 0.101, + 'total_energy_import_t2_kwh': None, + 'total_energy_import_t3_kwh': None, + 'total_energy_import_t4_kwh': None, + 'total_gas_m3': None, + 'total_liter_m3': None, + 'unique_meter_id': None, + 'voltage_sag_l1_count': None, + 'voltage_sag_l2_count': None, + 'voltage_sag_l3_count': None, + 'voltage_swell_l1_count': None, + 'voltage_swell_l2_count': None, + 'voltage_swell_l3_count': None, + 'wifi_ssid': '**REDACTED**', + 'wifi_strength': 92, + }), + 'device': dict({ + 'api_version': 'v1', + 'firmware_version': '3.06', + 'product_name': 'KWh meter 3-phase', + 'product_type': 'SDM630-wifi', + 'serial': '**REDACTED**', + }), + 'state': None, + 'system': dict({ + 'cloud_enabled': True, + }), }), 'entry': dict({ 'ip_address': '**REDACTED**', diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 4f1db0ac751..e237edee58e 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -3244,7 +3244,7 @@ 'state': '100', }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_active_water_usage:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_average_demand:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3267,177 +3267,16 @@ }), 'is_new': False, 'manufacturer': 
'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '2.03', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_active_water_usage:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.device_active_water_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:water', - 'original_name': 'Active water usage', - 'platform': 'homewizard', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', - 'unit_of_measurement': 'l/min', - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_active_water_usage:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device Active water usage', - 'icon': 'mdi:water', - 'state_class': , - 'unit_of_measurement': 'l/min', - }), - 'context': , - 'entity_id': 'sensor.device_active_water_usage', - 'last_changed': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_total_water_usage:device-registry] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - '3c:39:e7:aa:bb:cc', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'homewizard', - '3c39e7aabbcc', - ), - }), - 'is_new': False, - 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', - 'name': 'Device', - 
'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '2.03', - 'via_device_id': None, - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_total_water_usage:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.device_total_water_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:gauge', - 'original_name': 'Total water usage', - 'platform': 'homewizard', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', - 'unit_of_measurement': , - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_total_water_usage:state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'water', - 'friendly_name': 'Device Total water usage', - 'icon': 'mdi:gauge', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.device_total_water_usage', - 'last_changed': , - 'last_updated': , - 'state': '17.014', - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_ssid:device-registry] - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - tuple( - 'mac', - '3c:39:e7:aa:bb:cc', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'homewizard', - '3c39e7aabbcc', - ), - }), - 'is_new': False, - 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', - 'name': 'Device', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '2.03', - 
'via_device_id': None, - }) -# --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_ssid:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_average_demand:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3448,8 +3287,8 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_wi_fi_ssid', + 'entity_category': None, + 'entity_id': 'sensor.device_active_average_demand', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -3457,31 +3296,32 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Wi-Fi SSID', + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active average demand', 'platform': 'homewizard', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', - 'unit_of_measurement': None, + 'translation_key': 'active_power_average_w', + 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unit_of_measurement': , }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_ssid:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_average_demand:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device Wi-Fi SSID', - 'icon': 'mdi:wifi', + 'device_class': 'power', + 'friendly_name': 'Device Active average demand', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.device_wi_fi_ssid', + 'entity_id': 'sensor.device_active_average_demand', 'last_changed': , 'last_updated': , - 'state': 'My Wi-Fi', + 'state': '0', }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_strength:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_1:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3504,16 
+3344,16 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '2.03', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_strength:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_1:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3526,8 +3366,8 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.device_wi_fi_strength', + 'entity_category': None, + 'entity_id': 'sensor.device_active_current_phase_1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -3535,33 +3375,33 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, - 'original_icon': 'mdi:wifi', - 'original_name': 'Wi-Fi strength', + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active current phase 1', 'platform': 'homewizard', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', - 'unit_of_measurement': '%', + 'translation_key': 'active_current_l1_a', + 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unit_of_measurement': , }) # --- -# name: test_sensors[HWE-WTR-entity_ids1][sensor.device_wi_fi_strength:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_1:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Device Wi-Fi strength', - 'icon': 'mdi:wifi', + 'device_class': 'current', + 'friendly_name': 'Device Active current phase 1', 'state_class': , - 'unit_of_measurement': '%', + 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.device_wi_fi_strength', + 'entity_id': 'sensor.device_active_current_phase_1', 'last_changed': , 'last_updated': , - 
'state': '84', + 'state': '0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_2:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3584,16 +3424,256 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active current phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_current_l2_a', + 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Device Active current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_current_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active current phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_current_l3_a', + 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_current_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Device Active current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_current_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_frequency:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_frequency:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active frequency', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_frequency_hz', + 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_frequency:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Device Active frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_frequency', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power:device-registry] + 
DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3629,7 +3709,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3641,10 +3721,10 @@ 'entity_id': 'sensor.device_active_power', 'last_changed': , 'last_updated': , - 'state': '-1058.296', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power_phase_1:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_1:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3667,16 +3747,16 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power_phase_1:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_1:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3712,7 +3792,7 @@ 
'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_active_power_phase_1:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_1:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3724,10 +3804,10 @@ 'entity_id': 'sensor.device_active_power_phase_1', 'last_changed': , 'last_updated': , - 'state': '-1058.296', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_2:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3750,16 +3830,731 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l2_w', + 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unit_of_measurement': , + }) 
+# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l3_w', + 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unit_of_measurement': , + }) +# --- +# 
name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_power_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active voltage phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_voltage_l1_v', + 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Device Active voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_voltage_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_2:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active voltage phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_voltage_l2_v', + 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Device Active voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_voltage_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active voltage phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_voltage_l3_v', + 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_voltage_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Device Active voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_voltage_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_water_usage:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_water_usage:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_water_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:water', + 'original_name': 'Active water usage', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_liter_lpm', + 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unit_of_measurement': 'l/min', + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_active_water_usage:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Active water usage', + 'icon': 'mdi:water', + 'state_class': , + 'unit_of_measurement': 'l/min', + }), + 'context': , + 'entity_id': 'sensor.device_active_water_usage', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_long_power_failures_detected:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_long_power_failures_detected:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_long_power_failures_detected', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:transmission-tower-off', + 'original_name': 'Long power failures detected', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'long_power_fail_count', + 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_long_power_failures_detected:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Long power failures detected', + 'icon': 'mdi:transmission-tower-off', + }), + 'context': , + 'entity_id': 'sensor.device_long_power_failures_detected', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_peak_demand_current_month:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_peak_demand_current_month:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_peak_demand_current_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak demand current month', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_power_peak_w', + 'unique_id': 'aabbccddeeff_monthly_power_peak_w', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_peak_demand_current_month:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Peak demand current month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_peak_demand_current_month', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_power_failures_detected:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_power_failures_detected:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_power_failures_detected', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:transmission-tower-off', + 'original_name': 'Power failures detected', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'any_power_fail_count', + 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_power_failures_detected:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Power failures detected', + 'icon': 'mdi:transmission-tower-off', + }), + 'context': , + 'entity_id': 'sensor.device_power_failures_detected', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3792,7 +4587,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3804,10 +4599,10 @@ 'entity_id': 'sensor.device_total_energy_export', 'last_changed': , 'last_updated': , - 'state': '255.551', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export_tariff_1:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_1:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3830,16 +4625,16 @@ }), 'is_new': False, 
'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export_tariff_1:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_1:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3872,7 +4667,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_export_tariff_1:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_1:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3884,10 +4679,10 @@ 'entity_id': 'sensor.device_total_energy_export_tariff_1', 'last_changed': , 'last_updated': , - 'state': '255.551', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_2:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3910,16 +4705,256 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export_tariff_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export tariff 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_export_t2_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export tariff 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export_tariff_2', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export_tariff_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export tariff 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_export_t3_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export tariff 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export_tariff_3', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_4:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_4:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export_tariff_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export tariff 4', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_export_t4_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_export_tariff_4:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export tariff 4', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export_tariff_4', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -3952,7 +4987,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import:state] +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3964,10 +4999,10 @@ 'entity_id': 'sensor.device_total_energy_import', 'last_changed': , 'last_updated': , - 'state': '2.705', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import_tariff_1:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_1:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -3990,16 +5025,16 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import_tariff_1:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_1:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -4032,7 +5067,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_total_energy_import_tariff_1:state] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_1:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -4044,10 +5079,10 @@ 'entity_id': 'sensor.device_total_energy_import_tariff_1', 'last_changed': , 'last_updated': , - 'state': '2.705', + 'state': '0.0', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_ssid:device-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_2:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -4070,16 +5105,1199 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 
'SDM230-wifi', + 'model': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '4.19', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_ssid:entity-registry] +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import_tariff_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import tariff 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_t2_kwh', + 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import tariff 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import_tariff_2', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': 
set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import_tariff_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import tariff 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_t3_kwh', + 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import tariff 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import_tariff_3', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_4:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': 
set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_4:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import_tariff_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import tariff 4', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_t4_kwh', + 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_energy_import_tariff_4:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import tariff 4', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import_tariff_4', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_gas:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 
'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_gas:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_gas', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total gas', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_gas_m3', + 'unique_id': 'aabbccddeeff_total_gas_m3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_gas:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'gas', + 'friendly_name': 'Device Total gas', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_gas', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_water_usage:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': 
None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_water_usage:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_water_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:gauge', + 'original_name': 'Total water usage', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_liter_m3', + 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_total_water_usage:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Device Total water usage', + 'icon': 'mdi:gauge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_water_usage', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 
'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage sags detected phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_sag_l1_count', + 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage sags detected phase 1', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_2:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage sags detected phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_sag_l2_count', + 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage sags detected phase 2', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage sags detected phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_sag_l3_count', + 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_sags_detected_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage sags detected phase 3', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_sags_detected_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage swells detected phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_swell_l1_count', + 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage swells detected phase 1', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_2:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage swells detected phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_swell_l2_count', + 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage swells detected phase 2', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-P1', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.19', + 'via_device_id': None, + }) +# --- +# name: 
test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:alert', + 'original_name': 'Voltage swells detected phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_swell_l3_count', + 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-P1-zero-values-entity_ids1][sensor.device_voltage_swells_detected_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Voltage swells detected phase 3', + 'icon': 'mdi:alert', + }), + 'context': , + 'entity_id': 'sensor.device_voltage_swells_detected_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-SKT', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power:entity-registry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_w', + 'unique_id': 'aabbccddeeff_active_power_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power', + 'last_changed': , + 'last_updated': , + 'state': '1457.277', + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-SKT', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l1_w', + 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_active_power_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '1457.277', + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_export:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-SKT', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_export:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , 
+ }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_export_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_export:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_import:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-SKT', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_import:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_kwh', + 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_total_energy_import:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import', + 'last_changed': , + 'last_updated': , + 'state': '63.651', + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_ssid:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-SKT', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_ssid:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -4110,7 +6328,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_ssid:state] +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_ssid:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device 
Wi-Fi SSID', @@ -4123,7 +6341,7 @@ 'state': 'My Wi-Fi', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_strength:device-registry] +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_strength:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -4146,16 +6364,16 @@ }), 'is_new': False, 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'serial_number': None, 'suggested_area': None, - 'sw_version': '3.06', + 'sw_version': '3.03', 'via_device_id': None, }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_strength:entity-registry] +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_strength:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -4188,7 +6406,1454 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[SDM230-entity_ids2][sensor.device_wi_fi_strength:state] +# name: test_sensors[HWE-SKT-entity_ids2][sensor.device_wi_fi_strength:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi strength', + 'icon': 'mdi:wifi', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.device_wi_fi_strength', + 'last_changed': , + 'last_updated': , + 'state': '94', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_active_water_usage:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-WTR', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.03', + 'via_device_id': None, + }) +# --- 
+# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_active_water_usage:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_water_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:water', + 'original_name': 'Active water usage', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_liter_lpm', + 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unit_of_measurement': 'l/min', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_active_water_usage:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Active water usage', + 'icon': 'mdi:water', + 'state_class': , + 'unit_of_measurement': 'l/min', + }), + 'context': , + 'entity_id': 'sensor.device_active_water_usage', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_total_water_usage:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-WTR', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_total_water_usage:entity-registry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_water_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:gauge', + 'original_name': 'Total water usage', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_liter_m3', + 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_total_water_usage:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Device Total water usage', + 'icon': 'mdi:gauge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_water_usage', + 'last_changed': , + 'last_updated': , + 'state': '17.014', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_ssid:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-WTR', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi SSID', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_ssid', + 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_ssid:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi SSID', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'last_changed': , + 'last_updated': , + 'state': 'My Wi-Fi', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_strength:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'HWE-WTR', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '2.03', + 'via_device_id': None, + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_strength:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_strength', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi strength', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_strength', + 'unique_id': 'aabbccddeeff_wifi_strength', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[HWE-WTR-entity_ids3][sensor.device_wi_fi_strength:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi strength', + 'icon': 'mdi:wifi', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.device_wi_fi_strength', + 'last_changed': , + 'last_updated': , + 'state': '84', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_w', + 'unique_id': 'aabbccddeeff_active_power_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power', + 'last_changed': , + 'last_updated': , + 'state': '-1058.296', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 
'Active power phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l1_w', + 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_active_power_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '-1058.296', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_export:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_export:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export', + 'platform': 'homewizard', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'total_energy_export_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_export:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export', + 'last_changed': , + 'last_updated': , + 'state': '255.551', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_import:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_import:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_kwh', + 'unique_id': 
'aabbccddeeff_total_power_import_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_total_energy_import:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import', + 'last_changed': , + 'last_updated': , + 'state': '2.705', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_ssid:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi SSID', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_ssid', + 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_ssid:state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi SSID', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'last_changed': , + 'last_updated': , + 'state': 'My Wi-Fi', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_strength:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_strength:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi strength', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_strength', + 'unique_id': 'aabbccddeeff_wifi_strength', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[SDM230-entity_ids4][sensor.device_wi_fi_strength:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi strength', + 'icon': 'mdi:wifi', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.device_wi_fi_strength', + 'last_changed': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_w', + 'unique_id': 'aabbccddeeff_active_power_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power', + 'last_changed': , + 'last_updated': , + 'state': '-900.194', + }) +# --- 
+# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_1:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_1:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 1', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l1_w', + 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_1:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_1', + 'last_changed': , + 'last_updated': , + 'state': '-1058.296', + }) +# --- +# name: 
test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_2:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_2:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 2', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l2_w', + 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_2:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_2', + 'last_changed': , + 'last_updated': , + 'state': '158.102', + }) +# --- +# name: 
test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_3:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_3:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_active_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active power phase 3', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_power_l3_w', + 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_active_power_phase_3:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Device Active power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_active_power_phase_3', + 'last_changed': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: 
test_sensors[SDM630-entity_ids5][sensor.device_total_energy_export:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_total_energy_export:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy export', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_export_kwh', + 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_total_energy_export:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_export', + 'last_changed': , + 'last_updated': , + 'state': '0.523', + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_total_energy_import:device-registry] + 
DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_total_energy_import:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.device_total_energy_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy import', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_energy_import_kwh', + 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_total_energy_import:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Device Total energy import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_total_energy_import', + 'last_changed': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_ssid:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 
'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi SSID', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_ssid', + 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_ssid:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Wi-Fi SSID', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'sensor.device_wi_fi_ssid', + 'last_changed': , + 'last_updated': , + 'state': 'My Wi-Fi', + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_strength:device-registry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 
'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_strength:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_wi_fi_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Wi-Fi strength', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_strength', + 'unique_id': 'aabbccddeeff_wifi_strength', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[SDM630-entity_ids5][sensor.device_wi_fi_strength:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Wi-Fi strength', diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index d38fab029d3..0fb4680a0b1 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_switch_entities[switch.device-state_set-power_on-HWE-SKT] +# name: test_switch_entities[HWE-SKT-switch.device-state_set-power_on] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'outlet', @@ -12,7 +12,7 @@ 'state': 'on', }) # --- -# name: test_switch_entities[switch.device-state_set-power_on-HWE-SKT].1 +# name: test_switch_entities[HWE-SKT-switch.device-state_set-power_on].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -43,7 +43,7 @@ 
'unit_of_measurement': None, }) # --- -# name: test_switch_entities[switch.device-state_set-power_on-HWE-SKT].2 +# name: test_switch_entities[HWE-SKT-switch.device-state_set-power_on].2 DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -75,7 +75,7 @@ 'via_device_id': None, }) # --- -# name: test_switch_entities[switch.device_cloud_connection-system_set-cloud_enabled-HWE-SKT] +# name: test_switch_entities[HWE-SKT-switch.device_cloud_connection-system_set-cloud_enabled] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Cloud connection', @@ -88,7 +88,7 @@ 'state': 'on', }) # --- -# name: test_switch_entities[switch.device_cloud_connection-system_set-cloud_enabled-HWE-SKT].1 +# name: test_switch_entities[HWE-SKT-switch.device_cloud_connection-system_set-cloud_enabled].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_switch_entities[switch.device_cloud_connection-system_set-cloud_enabled-HWE-SKT].2 +# name: test_switch_entities[HWE-SKT-switch.device_cloud_connection-system_set-cloud_enabled].2 DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -151,7 +151,7 @@ 'via_device_id': None, }) # --- -# name: test_switch_entities[switch.device_switch_lock-state_set-switch_lock-HWE-SKT] +# name: test_switch_entities[HWE-SKT-switch.device_switch_lock-state_set-switch_lock] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Device Switch lock', @@ -164,7 +164,7 @@ 'state': 'off', }) # --- -# name: test_switch_entities[switch.device_switch_lock-state_set-switch_lock-HWE-SKT].1 +# name: test_switch_entities[HWE-SKT-switch.device_switch_lock-state_set-switch_lock].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -195,7 +195,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_switch_entities[switch.device_switch_lock-state_set-switch_lock-HWE-SKT].2 +# name: 
test_switch_entities[HWE-SKT-switch.device_switch_lock-state_set-switch_lock].2 DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , @@ -227,3 +227,155 @@ 'via_device_id': None, }) # --- +# name: test_switch_entities[SDM230-switch.device_cloud_connection-system_set-cloud_enabled] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Cloud connection', + 'icon': 'mdi:cloud', + }), + 'context': , + 'entity_id': 'switch.device_cloud_connection', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_entities[SDM230-switch.device_cloud_connection-system_set-cloud_enabled].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.device_cloud_connection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:cloud', + 'original_name': 'Cloud connection', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cloud_connection', + 'unique_id': 'aabbccddeeff_cloud_connection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_entities[SDM230-switch.device_cloud_connection-system_set-cloud_enabled].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM230-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 
'via_device_id': None, + }) +# --- +# name: test_switch_entities[SDM630-switch.device_cloud_connection-system_set-cloud_enabled] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Cloud connection', + 'icon': 'mdi:cloud', + }), + 'context': , + 'entity_id': 'switch.device_cloud_connection', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_entities[SDM630-switch.device_cloud_connection-system_set-cloud_enabled].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.device_cloud_connection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:cloud', + 'original_name': 'Cloud connection', + 'platform': 'homewizard', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cloud_connection', + 'unique_id': 'aabbccddeeff_cloud_connection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_entities[SDM630-switch.device_cloud_connection-system_set-cloud_enabled].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '3c:39:e7:aa:bb:cc', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'homewizard', + '3c39e7aabbcc', + ), + }), + 'is_new': False, + 'manufacturer': 'HomeWizard', + 'model': 'SDM630-wifi', + 'name': 'Device', + 'name_by_user': None, + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '3.06', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/homewizard/test_button.py b/tests/components/homewizard/test_button.py index a7b7d0917e6..c25a4ed0f4e 100644 
--- a/tests/components/homewizard/test_button.py +++ b/tests/components/homewizard/test_button.py @@ -17,7 +17,7 @@ pytestmark = [ ] -@pytest.mark.parametrize("device_fixture", ["HWE-WTR", "SDM230"]) +@pytest.mark.parametrize("device_fixture", ["HWE-WTR", "SDM230", "SDM630"]) async def test_identify_button_entity_not_loaded_when_not_available( hass: HomeAssistant, ) -> None: @@ -58,7 +58,10 @@ async def test_identify_button( # Raise RequestError when identify is called mock_homewizardenergy.identify.side_effect = RequestError() - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^An error occurred while communicating with HomeWizard device$", + ): await hass.services.async_call( button.DOMAIN, button.SERVICE_PRESS, @@ -73,7 +76,10 @@ async def test_identify_button( # Raise RequestError when identify is called mock_homewizardenergy.identify.side_effect = DisabledError() - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^The local API of the HomeWizard device is disabled$", + ): await hass.services.async_call( button.DOMAIN, button.SERVICE_PRESS, diff --git a/tests/components/homewizard/test_diagnostics.py b/tests/components/homewizard/test_diagnostics.py index ab7432e8dbf..5a140fa70c8 100644 --- a/tests/components/homewizard/test_diagnostics.py +++ b/tests/components/homewizard/test_diagnostics.py @@ -17,6 +17,7 @@ from tests.typing import ClientSessionGenerator "HWE-SKT", "HWE-WTR", "SDM230", + "SDM630", ], ) async def test_diagnostics( diff --git a/tests/components/homewizard/test_number.py b/tests/components/homewizard/test_number.py index 0062e32e54e..ebd8d80ece2 100644 --- a/tests/components/homewizard/test_number.py +++ b/tests/components/homewizard/test_number.py @@ -67,7 +67,10 @@ async def test_number_entities( mock_homewizardenergy.state_set.assert_called_with(brightness=127) mock_homewizardenergy.state_set.side_effect = RequestError - with 
pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^An error occurred while communicating with HomeWizard device$", + ): await hass.services.async_call( number.DOMAIN, SERVICE_SET_VALUE, @@ -79,7 +82,10 @@ async def test_number_entities( ) mock_homewizardenergy.state_set.side_effect = DisabledError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^The local API of the HomeWizard device is disabled$", + ): await hass.services.async_call( number.DOMAIN, SERVICE_SET_VALUE, @@ -91,7 +97,7 @@ async def test_number_entities( ) -@pytest.mark.parametrize("device_fixture", ["HWE-WTR", "SDM230"]) +@pytest.mark.parametrize("device_fixture", ["HWE-WTR", "SDM230", "SDM630"]) async def test_entities_not_created_for_device(hass: HomeAssistant) -> None: """Does not load button when device has no support for it.""" assert not hass.states.get("number.device_status_light_brightness") diff --git a/tests/components/homewizard/test_sensor.py b/tests/components/homewizard/test_sensor.py index 04795a5e191..7e59769a768 100644 --- a/tests/components/homewizard/test_sensor.py +++ b/tests/components/homewizard/test_sensor.py @@ -69,6 +69,56 @@ pytestmark = [ "sensor.device_total_water_usage", ], ), + ( + "HWE-P1-zero-values", + [ + "sensor.device_total_energy_import", + "sensor.device_total_energy_import_tariff_1", + "sensor.device_total_energy_import_tariff_2", + "sensor.device_total_energy_import_tariff_3", + "sensor.device_total_energy_import_tariff_4", + "sensor.device_total_energy_export", + "sensor.device_total_energy_export_tariff_1", + "sensor.device_total_energy_export_tariff_2", + "sensor.device_total_energy_export_tariff_3", + "sensor.device_total_energy_export_tariff_4", + "sensor.device_active_power", + "sensor.device_active_power_phase_1", + "sensor.device_active_power_phase_2", + "sensor.device_active_power_phase_3", + "sensor.device_active_voltage_phase_1", + 
"sensor.device_active_voltage_phase_2", + "sensor.device_active_voltage_phase_3", + "sensor.device_active_current_phase_1", + "sensor.device_active_current_phase_2", + "sensor.device_active_current_phase_3", + "sensor.device_active_frequency", + "sensor.device_voltage_sags_detected_phase_1", + "sensor.device_voltage_sags_detected_phase_2", + "sensor.device_voltage_sags_detected_phase_3", + "sensor.device_voltage_swells_detected_phase_1", + "sensor.device_voltage_swells_detected_phase_2", + "sensor.device_voltage_swells_detected_phase_3", + "sensor.device_power_failures_detected", + "sensor.device_long_power_failures_detected", + "sensor.device_active_average_demand", + "sensor.device_peak_demand_current_month", + "sensor.device_total_gas", + "sensor.device_active_water_usage", + "sensor.device_total_water_usage", + ], + ), + ( + "HWE-SKT", + [ + "sensor.device_wi_fi_ssid", + "sensor.device_wi_fi_strength", + "sensor.device_total_energy_import", + "sensor.device_total_energy_export", + "sensor.device_active_power", + "sensor.device_active_power_phase_1", + ], + ), ( "HWE-WTR", [ @@ -84,13 +134,24 @@ pytestmark = [ "sensor.device_wi_fi_ssid", "sensor.device_wi_fi_strength", "sensor.device_total_energy_import", - "sensor.device_total_energy_import_tariff_1", "sensor.device_total_energy_export", - "sensor.device_total_energy_export_tariff_1", "sensor.device_active_power", "sensor.device_active_power_phase_1", ], ), + ( + "SDM630", + [ + "sensor.device_wi_fi_ssid", + "sensor.device_wi_fi_strength", + "sensor.device_total_energy_import", + "sensor.device_total_energy_export", + "sensor.device_active_power", + "sensor.device_active_power_phase_1", + "sensor.device_active_power_phase_2", + "sensor.device_active_power_phase_3", + ], + ), ], ) async def test_sensors( @@ -139,6 +200,12 @@ async def test_sensors( "sensor.device_total_energy_export_tariff_4", ], ), + ( + "HWE-SKT", + [ + "sensor.device_wi_fi_strength", + ], + ), ( "HWE-WTR", [ @@ -151,6 +218,12 @@ async def 
test_sensors( "sensor.device_wi_fi_strength", ], ), + ( + "SDM630", + [ + "sensor.device_wi_fi_strength", + ], + ), ], ) async def test_disabled_by_default_sensors( @@ -186,6 +259,46 @@ async def test_sensors_unreachable( @pytest.mark.parametrize( ("device_fixture", "entity_ids"), [ + ( + "HWE-SKT", + [ + "sensor.device_active_average_demand", + "sensor.device_active_current_phase_1", + "sensor.device_active_current_phase_2", + "sensor.device_active_current_phase_3", + "sensor.device_active_frequency", + "sensor.device_active_power_phase_2", + "sensor.device_active_power_phase_3", + "sensor.device_active_tariff", + "sensor.device_active_voltage_phase_1", + "sensor.device_active_voltage_phase_2", + "sensor.device_active_voltage_phase_3", + "sensor.device_active_water_usage", + "sensor.device_dsmr_version", + "sensor.device_gas_meter_identifier", + "sensor.device_long_power_failures_detected", + "sensor.device_peak_demand_current_month", + "sensor.device_power_failures_detected", + "sensor.device_smart_meter_identifier", + "sensor.device_smart_meter_model", + "sensor.device_total_energy_export_tariff_1", + "sensor.device_total_energy_export_tariff_2", + "sensor.device_total_energy_export_tariff_3", + "sensor.device_total_energy_export_tariff_4", + "sensor.device_total_energy_import_tariff_1", + "sensor.device_total_energy_import_tariff_2", + "sensor.device_total_energy_import_tariff_3", + "sensor.device_total_energy_import_tariff_4", + "sensor.device_total_gas", + "sensor.device_total_water_usage", + "sensor.device_voltage_sags_detected_phase_1", + "sensor.device_voltage_sags_detected_phase_2", + "sensor.device_voltage_sags_detected_phase_3", + "sensor.device_voltage_swells_detected_phase_1", + "sensor.device_voltage_swells_detected_phase_2", + "sensor.device_voltage_swells_detected_phase_3", + ], + ), ( "HWE-WTR", [ @@ -250,9 +363,49 @@ async def test_sensors_unreachable( "sensor.device_power_failures_detected", "sensor.device_smart_meter_identifier", 
"sensor.device_smart_meter_model", + "sensor.device_total_energy_export_tariff_1", "sensor.device_total_energy_export_tariff_2", "sensor.device_total_energy_export_tariff_3", "sensor.device_total_energy_export_tariff_4", + "sensor.device_total_energy_import_tariff_1", + "sensor.device_total_energy_import_tariff_2", + "sensor.device_total_energy_import_tariff_3", + "sensor.device_total_energy_import_tariff_4", + "sensor.device_total_gas", + "sensor.device_total_water_usage", + "sensor.device_voltage_sags_detected_phase_1", + "sensor.device_voltage_sags_detected_phase_2", + "sensor.device_voltage_sags_detected_phase_3", + "sensor.device_voltage_swells_detected_phase_1", + "sensor.device_voltage_swells_detected_phase_2", + "sensor.device_voltage_swells_detected_phase_3", + ], + ), + ( + "SDM630", + [ + "sensor.device_active_average_demand", + "sensor.device_active_current_phase_1", + "sensor.device_active_current_phase_2", + "sensor.device_active_current_phase_3", + "sensor.device_active_frequency", + "sensor.device_active_tariff", + "sensor.device_active_voltage_phase_1", + "sensor.device_active_voltage_phase_2", + "sensor.device_active_voltage_phase_3", + "sensor.device_active_water_usage", + "sensor.device_dsmr_version", + "sensor.device_gas_meter_identifier", + "sensor.device_long_power_failures_detected", + "sensor.device_peak_demand_current_month", + "sensor.device_power_failures_detected", + "sensor.device_smart_meter_identifier", + "sensor.device_smart_meter_model", + "sensor.device_total_energy_export_tariff_1", + "sensor.device_total_energy_export_tariff_2", + "sensor.device_total_energy_export_tariff_3", + "sensor.device_total_energy_export_tariff_4", + "sensor.device_total_energy_import_tariff_1", "sensor.device_total_energy_import_tariff_2", "sensor.device_total_energy_import_tariff_3", "sensor.device_total_energy_import_tariff_4", diff --git a/tests/components/homewizard/test_switch.py b/tests/components/homewizard/test_switch.py index 
13a0bfaa863..2f6e777a3a8 100644 --- a/tests/components/homewizard/test_switch.py +++ b/tests/components/homewizard/test_switch.py @@ -42,7 +42,13 @@ pytestmark = [ [ "switch.device", "switch.device_switch_lock", - "switch.device_cloud_connection", + ], + ), + ( + "SDM630", + [ + "switch.device", + "switch.device_switch_lock", ], ), ], @@ -56,13 +62,14 @@ async def test_entities_not_created_for_device( assert not hass.states.get(entity_id) -@pytest.mark.parametrize("device_fixture", ["HWE-SKT"]) @pytest.mark.parametrize( - ("entity_id", "method", "parameter"), + ("device_fixture", "entity_id", "method", "parameter"), [ - ("switch.device", "state_set", "power_on"), - ("switch.device_switch_lock", "state_set", "switch_lock"), - ("switch.device_cloud_connection", "system_set", "cloud_enabled"), + ("HWE-SKT", "switch.device", "state_set", "power_on"), + ("HWE-SKT", "switch.device_switch_lock", "state_set", "switch_lock"), + ("HWE-SKT", "switch.device_cloud_connection", "system_set", "cloud_enabled"), + ("SDM230", "switch.device_cloud_connection", "system_set", "cloud_enabled"), + ("SDM630", "switch.device_cloud_connection", "system_set", "cloud_enabled"), ], ) async def test_switch_entities( @@ -113,7 +120,10 @@ async def test_switch_entities( # Test request error handling mocked_method.side_effect = RequestError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^An error occurred while communicating with HomeWizard device$", + ): await hass.services.async_call( switch.DOMAIN, SERVICE_TURN_ON, @@ -121,7 +131,10 @@ async def test_switch_entities( blocking=True, ) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^An error occurred while communicating with HomeWizard device$", + ): await hass.services.async_call( switch.DOMAIN, SERVICE_TURN_OFF, @@ -132,7 +145,10 @@ async def test_switch_entities( # Test disabled error handling mocked_method.side_effect = DisabledError - with 
pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^The local API of the HomeWizard device is disabled$", + ): await hass.services.async_call( switch.DOMAIN, SERVICE_TURN_ON, @@ -140,7 +156,10 @@ async def test_switch_entities( blocking=True, ) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match=r"^The local API of the HomeWizard device is disabled$", + ): await hass.services.async_call( switch.DOMAIN, SERVICE_TURN_OFF, diff --git a/tests/components/http/__init__.py b/tests/components/http/__init__.py index 238f5c7050a..cd1d5916ab8 100644 --- a/tests/components/http/__init__.py +++ b/tests/components/http/__init__.py @@ -1,34 +1,3 @@ """Tests for the HTTP component.""" -from aiohttp import web - # Relic from the past. Kept here so we can run negative tests. HTTP_HEADER_HA_AUTH = "X-HA-access" - - -def mock_real_ip(app): - """Inject middleware to mock real IP. - - Returns a function to set the real IP. - """ - ip_to_mock = None - - def set_ip_to_mock(value): - nonlocal ip_to_mock - ip_to_mock = value - - @web.middleware - async def mock_real_ip(request, handler): - """Mock Real IP middleware.""" - nonlocal ip_to_mock - - request = request.clone(remote=ip_to_mock) - - return await handler(request) - - async def real_ip_startup(app): - """Startup of real ip.""" - app.middlewares.insert(0, mock_real_ip) - - app.on_startup.append(real_ip_startup) - - return set_ip_to_mock diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 246572e64f8..2f1259c22de 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -35,9 +35,10 @@ from homeassistant.components.http.request_context import ( from homeassistant.core import HomeAssistant, callback from homeassistant.setup import async_setup_component -from . import HTTP_HEADER_HA_AUTH, mock_real_ip +from . 
import HTTP_HEADER_HA_AUTH from tests.common import MockUser +from tests.test_util import mock_real_ip from tests.typing import ClientSessionGenerator, WebSocketGenerator API_PASSWORD = "test-password" diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 8082a268a80..e38a9c97071 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -24,9 +24,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from . import mock_real_ip - from tests.common import async_get_persistent_notifications +from tests.test_util import mock_real_ip from tests.typing import ClientSessionGenerator SUPERVISOR_IP = "1.2.3.4" @@ -392,3 +391,29 @@ async def test_failed_login_attempts_counter( resp = await client.get("/auth_false") assert resp.status == HTTPStatus.UNAUTHORIZED assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 2 + + +async def test_single_ban_file_entry( + hass: HomeAssistant, +) -> None: + """Test that only one item is added to ban file.""" + app = web.Application() + app["hass"] = hass + + async def unauth_handler(request): + """Return a mock web response.""" + raise HTTPUnauthorized + + app.router.add_get("/example", unauth_handler) + setup_bans(hass, app, 2) + mock_real_ip(app)("200.201.202.204") + + manager: IpBanManager = app[KEY_BAN_MANAGER] + m_open = mock_open() + + with patch("homeassistant.components.http.ban.open", m_open, create=True): + remote_ip = ip_address("200.201.202.204") + await manager.async_add_ban(remote_ip) + await manager.async_add_ban(remote_ip) + + assert m_open.call_count == 1 diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index 5a5bffe6748..97e39811cd8 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -5,8 +5,7 @@ from http import HTTPStatus from ipaddress import ip_network import logging from 
pathlib import Path -import time -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch import pytest @@ -21,7 +20,6 @@ from homeassistant.util import dt as dt_util from homeassistant.util.ssl import server_context_intermediate, server_context_modern from tests.common import async_fire_time_changed -from tests.test_util.aiohttp import AiohttpClientMockResponse from tests.typing import ClientSessionGenerator @@ -501,22 +499,3 @@ async def test_logging( response = await client.get("/api/states/logging.entity") assert response.status == HTTPStatus.OK assert "GET /api/states/logging.entity" not in caplog.text - - -async def test_hass_access_logger_at_info_level( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test that logging happens at info level.""" - test_logger = logging.getLogger("test.aiohttp.logger") - logger = http.HomeAssistantAccessLogger(test_logger) - mock_request = MagicMock() - response = AiohttpClientMockResponse( - "POST", "http://127.0.0.1", status=HTTPStatus.OK - ) - setattr(response, "body_length", 42) - logger.log(mock_request, response, time.time()) - assert "42" in caplog.text - caplog.clear() - test_logger.setLevel(logging.WARNING) - logger.log(mock_request, response, time.time()) - assert "42" not in caplog.text diff --git a/tests/components/huawei_lte/__init__.py b/tests/components/huawei_lte/__init__.py index 79602ecfb44..2d43a5eade1 100644 --- a/tests/components/huawei_lte/__init__.py +++ b/tests/components/huawei_lte/__init__.py @@ -1 +1,23 @@ """Tests for the huawei_lte component.""" + +from unittest.mock import MagicMock + +from huawei_lte_api.enums.cradle import ConnectionStatusEnum + + +def magic_client(multi_basic_settings_value: dict) -> MagicMock: + """Mock huawei_lte.Client.""" + information = MagicMock(return_value={"SerialNumber": "test-serial-number"}) + check_notifications = MagicMock(return_value={"SmsStorageFull": 0}) + status = MagicMock( + 
return_value={"ConnectionStatus": ConnectionStatusEnum.CONNECTED.value} + ) + multi_basic_settings = MagicMock(return_value=multi_basic_settings_value) + wifi_feature_switch = MagicMock(return_value={"wifi24g_switch_enable": 1}) + device = MagicMock(information=information) + monitoring = MagicMock(check_notifications=check_notifications, status=status) + wlan = MagicMock( + multi_basic_settings=multi_basic_settings, + wifi_feature_switch=wifi_feature_switch, + ) + return MagicMock(device=device, monitoring=monitoring, wlan=wlan) diff --git a/tests/components/huawei_lte/test_button.py b/tests/components/huawei_lte/test_button.py new file mode 100644 index 00000000000..982fba166c3 --- /dev/null +++ b/tests/components/huawei_lte/test_button.py @@ -0,0 +1,76 @@ +"""Tests for the Huawei LTE switches.""" +from unittest.mock import MagicMock, patch + +from huawei_lte_api.enums.device import ControlModeEnum + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.huawei_lte.const import ( + BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS, + BUTTON_KEY_RESTART, + DOMAIN, + SERVICE_SUSPEND_INTEGRATION, +) +from homeassistant.const import ATTR_ENTITY_ID, CONF_URL +from homeassistant.core import HomeAssistant + +from . 
import magic_client + +from tests.common import MockConfigEntry + +MOCK_CONF_URL = "http://huawei-lte.example.com" + + +@patch("homeassistant.components.huawei_lte.Connection", MagicMock()) +@patch("homeassistant.components.huawei_lte.Client", return_value=magic_client({})) +async def test_clear_traffic_statistics(client, hass: HomeAssistant) -> None: + """Test clear traffic statistics button.""" + huawei_lte = MockConfigEntry(domain=DOMAIN, data={CONF_URL: MOCK_CONF_URL}) + huawei_lte.add_to_hass(hass) + await hass.config_entries.async_setup(huawei_lte.entry_id) + await hass.async_block_till_done() + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.lte_{BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS}"}, + blocking=True, + ) + await hass.async_block_till_done() + client.return_value.monitoring.set_clear_traffic.assert_called_once() + + client.return_value.monitoring.set_clear_traffic.reset_mock() + await hass.services.async_call( + DOMAIN, + SERVICE_SUSPEND_INTEGRATION, + {CONF_URL: MOCK_CONF_URL}, + blocking=True, + ) + await hass.async_block_till_done() + client.return_value.monitoring.set_clear_traffic.assert_not_called() + + +@patch("homeassistant.components.huawei_lte.Connection", MagicMock()) +@patch("homeassistant.components.huawei_lte.Client", return_value=magic_client({})) +async def test_restart(client, hass: HomeAssistant) -> None: + """Test restart button.""" + huawei_lte = MockConfigEntry(domain=DOMAIN, data={CONF_URL: MOCK_CONF_URL}) + huawei_lte.add_to_hass(hass) + await hass.config_entries.async_setup(huawei_lte.entry_id) + await hass.async_block_till_done() + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.lte_{BUTTON_KEY_RESTART}"}, + blocking=True, + ) + await hass.async_block_till_done() + client.return_value.device.set_control.assert_called_with(ControlModeEnum.REBOOT) + + client.return_value.device.set_control.reset_mock() + await hass.services.async_call( + 
DOMAIN, + SERVICE_SUSPEND_INTEGRATION, + {CONF_URL: MOCK_CONF_URL}, + blocking=True, + ) + await hass.async_block_till_done() + client.return_value.device.set_control.assert_not_called() diff --git a/tests/components/huawei_lte/test_config_flow.py b/tests/components/huawei_lte/test_config_flow.py index 13307e43648..e358920b07b 100644 --- a/tests/components/huawei_lte/test_config_flow.py +++ b/tests/components/huawei_lte/test_config_flow.py @@ -1,5 +1,7 @@ """Tests for the Huawei LTE config flow.""" +from typing import Any from unittest.mock import patch +from urllib.parse import urlparse, urlunparse from huawei_lte_api.enums.client import ResponseCodeEnum from huawei_lte_api.enums.user import LoginErrorEnum, LoginStateEnum, PasswordTypeEnum @@ -18,6 +20,7 @@ from homeassistant.const import ( CONF_RECIPIENT, CONF_URL, CONF_USERNAME, + CONF_VERIFY_SSL, ) from homeassistant.core import HomeAssistant @@ -25,8 +28,9 @@ from tests.common import MockConfigEntry FIXTURE_UNIQUE_ID = "SERIALNUMBER" -FIXTURE_USER_INPUT = { +FIXTURE_USER_INPUT: dict[str, Any] = { CONF_URL: "http://192.168.1.1/", + CONF_VERIFY_SSL: False, CONF_USERNAME: "admin", CONF_PASSWORD: "secret", } @@ -95,34 +99,59 @@ async def test_already_configured( assert result["reason"] == "already_configured" -async def test_connection_error( - hass: HomeAssistant, requests_mock: requests_mock.Mocker -) -> None: - """Test we show user form on connection error.""" - requests_mock.request(ANY, ANY, exc=ConnectionError()) +@pytest.mark.parametrize( + ("exception", "errors", "data_patch"), + ( + (ConnectionError(), {CONF_URL: "unknown"}, {}), + (requests.exceptions.SSLError(), {CONF_URL: "ssl_error_try_plain"}, {}), + ( + requests.exceptions.SSLError(), + {CONF_URL: "ssl_error_try_unverified"}, + {CONF_VERIFY_SSL: True}, + ), + ), +) +async def test_connection_errors( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + exception: Exception, + errors: dict[str, str], + data_patch: dict[str, Any], +): + 
"""Test we show user form on various errors.""" + requests_mock.request(ANY, ANY, exc=exception) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER}, data=FIXTURE_USER_INPUT + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=FIXTURE_USER_INPUT | data_patch, ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == {CONF_URL: "unknown"} + assert result["errors"] == errors @pytest.fixture def login_requests_mock(requests_mock): """Set up a requests_mock with base mocks for login tests.""" - requests_mock.request( - ANY, FIXTURE_USER_INPUT[CONF_URL], text='' - ) - requests_mock.request( - ANY, - f"{FIXTURE_USER_INPUT[CONF_URL]}api/user/state-login", - text=( - f"{LoginStateEnum.LOGGED_OUT}" - f"{PasswordTypeEnum.SHA256}" - ), + https_url = urlunparse( + urlparse(FIXTURE_USER_INPUT[CONF_URL])._replace(scheme="https") ) + for url in FIXTURE_USER_INPUT[CONF_URL], https_url: + requests_mock.request(ANY, url, text='') + requests_mock.request( + ANY, + f"{url}api/user/state-login", + text=( + f"{LoginStateEnum.LOGGED_OUT}" + f"{PasswordTypeEnum.SHA256}" + ), + ) + requests_mock.request( + ANY, + f"{url}api/user/logout", + text="OK", + ) return requests_mock @@ -194,11 +223,19 @@ async def test_login_error( assert result["errors"] == errors -async def test_success(hass: HomeAssistant, login_requests_mock) -> None: +@pytest.mark.parametrize("scheme", ("http", "https")) +async def test_success(hass: HomeAssistant, login_requests_mock, scheme: str) -> None: """Test successful flow provides entry creation data.""" + user_input = { + **FIXTURE_USER_INPUT, + CONF_URL: urlunparse( + urlparse(FIXTURE_USER_INPUT[CONF_URL])._replace(scheme=scheme) + ), + } + login_requests_mock.request( ANY, - f"{FIXTURE_USER_INPUT[CONF_URL]}api/user/login", + f"{user_input[CONF_URL]}api/user/login", text="OK", ) with 
patch("homeassistant.components.huawei_lte.async_setup"), patch( @@ -207,14 +244,14 @@ async def test_success(hass: HomeAssistant, login_requests_mock) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=user_input, ) await hass.async_block_till_done() assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["data"][CONF_URL] == FIXTURE_USER_INPUT[CONF_URL] - assert result["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] + assert result["data"][CONF_URL] == user_input[CONF_URL] + assert result["data"][CONF_USERNAME] == user_input[CONF_USERNAME] + assert result["data"][CONF_PASSWORD] == user_input[CONF_PASSWORD] @pytest.mark.parametrize( @@ -300,8 +337,9 @@ async def test_ssdp( ) for k, v in expected_result.items(): - assert result[k] == v + assert result[k] == v # type: ignore[literal-required] # expected is a subset if result.get("data_schema"): + assert result["data_schema"] is not None assert result["data_schema"]({})[CONF_URL] == url + "/" @@ -355,6 +393,7 @@ async def test_reauth( assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "reauth_confirm" + assert result["data_schema"] is not None assert result["data_schema"]({}) == { CONF_USERNAME: mock_entry_data[CONF_USERNAME], CONF_PASSWORD: mock_entry_data[CONF_PASSWORD], @@ -376,7 +415,7 @@ async def test_reauth( await hass.async_block_till_done() for k, v in expected_result.items(): - assert result[k] == v + assert result[k] == v # type: ignore[literal-required] # expected is a subset for k, v in expected_entry_data.items(): assert entry.data[k] == v diff --git a/tests/components/huawei_lte/test_switches.py b/tests/components/huawei_lte/test_switches.py index e686c2356e6..acaffdbd0ba 100644 --- a/tests/components/huawei_lte/test_switches.py +++ 
b/tests/components/huawei_lte/test_switches.py @@ -1,8 +1,6 @@ """Tests for the Huawei LTE switches.""" from unittest.mock import MagicMock, patch -from huawei_lte_api.enums.cradle import ConnectionStatusEnum - from homeassistant.components.huawei_lte.const import DOMAIN from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, @@ -13,29 +11,13 @@ from homeassistant.const import ATTR_ENTITY_ID, CONF_URL, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . import magic_client + from tests.common import MockConfigEntry SWITCH_WIFI_GUEST_NETWORK = "switch.lte_wi_fi_guest_network" -def magic_client(multi_basic_settings_value: dict) -> MagicMock: - """Mock huawei_lte.Client.""" - information = MagicMock(return_value={"SerialNumber": "test-serial-number"}) - check_notifications = MagicMock(return_value={"SmsStorageFull": 0}) - status = MagicMock( - return_value={"ConnectionStatus": ConnectionStatusEnum.CONNECTED.value} - ) - multi_basic_settings = MagicMock(return_value=multi_basic_settings_value) - wifi_feature_switch = MagicMock(return_value={"wifi24g_switch_enable": 1}) - device = MagicMock(information=information) - monitoring = MagicMock(check_notifications=check_notifications, status=status) - wlan = MagicMock( - multi_basic_settings=multi_basic_settings, - wifi_feature_switch=wifi_feature_switch, - ) - return MagicMock(device=device, monitoring=monitoring, wlan=wlan) - - @patch("homeassistant.components.huawei_lte.Connection", MagicMock()) @patch("homeassistant.components.huawei_lte.Client", return_value=magic_client({})) async def test_huawei_lte_wifi_guest_network_config_entry_when_network_is_not_present( diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index 4a6c8372e57..1f892785812 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -1,14 +1,23 @@ """Common fixtures for the 
Hydrawise tests.""" -from collections.abc import Generator -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from collections.abc import Awaitable, Callable, Generator +from datetime import datetime, timedelta +from unittest.mock import AsyncMock, patch +from pydrawise.schema import ( + Controller, + ControllerHardware, + ScheduledZoneRun, + ScheduledZoneRuns, + User, + Zone, +) import pytest from homeassistant.components.hydrawise.const import DOMAIN from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry @@ -24,59 +33,71 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture def mock_pydrawise( - mock_controller: dict[str, Any], - mock_zones: list[dict[str, Any]], -) -> Generator[Mock, None, None]: - """Mock LegacyHydrawise.""" - with patch("pydrawise.legacy.LegacyHydrawise", autospec=True) as mock_pydrawise: - mock_pydrawise.return_value.controller_info = {"controllers": [mock_controller]} - mock_pydrawise.return_value.current_controller = mock_controller - mock_pydrawise.return_value.controller_status = {"relays": mock_zones} - mock_pydrawise.return_value.relays = mock_zones - mock_pydrawise.return_value.relays_by_zone_number = { - r["relay"]: r for r in mock_zones - } + user: User, + controller: Controller, + zones: list[Zone], +) -> Generator[AsyncMock, None, None]: + """Mock LegacyHydrawiseAsync.""" + with patch( + "pydrawise.legacy.LegacyHydrawiseAsync", autospec=True + ) as mock_pydrawise: + user.controllers = [controller] + controller.zones = zones + mock_pydrawise.return_value.get_user.return_value = user yield mock_pydrawise.return_value @pytest.fixture -def mock_controller() -> dict[str, Any]: - """Mock Hydrawise controller.""" - return { - "name": "Home Controller", - "last_contact": 1693292420, - "serial_number": "0310b36090", - "controller_id": 52496, - "status": "Unknown", - } +def 
user() -> User: + """Hydrawise User fixture.""" + return User(customer_id=12345) @pytest.fixture -def mock_zones() -> list[dict[str, Any]]: - """Mock Hydrawise zones.""" +def controller() -> Controller: + """Hydrawise Controller fixture.""" + return Controller( + id=52496, + name="Home Controller", + hardware=ControllerHardware( + serial_number="0310b36090", + ), + last_contact_time=datetime.fromtimestamp(1693292420), + online=True, + ) + + +@pytest.fixture +def zones() -> list[Zone]: + """Hydrawise zone fixtures.""" return [ - { - "name": "Zone One", - "period": 259200, - "relay": 1, - "relay_id": 5965394, - "run": 1800, - "stop": 1, - "time": 330597, - "timestr": "Sat", - "type": 1, - }, - { - "name": "Zone Two", - "period": 259200, - "relay": 2, - "relay_id": 5965395, - "run": 1788, - "stop": 1, - "time": 1, - "timestr": "Now", - "type": 106, - }, + Zone( + name="Zone One", + number=1, + id=5965394, + scheduled_runs=ScheduledZoneRuns( + summary="", + current_run=None, + next_run=ScheduledZoneRun( + start_time=dt_util.now() + timedelta(seconds=330597), + end_time=dt_util.now() + + timedelta(seconds=330597) + + timedelta(seconds=1800), + normal_duration=timedelta(seconds=1800), + duration=timedelta(seconds=1800), + ), + ), + ), + Zone( + name="Zone Two", + number=2, + id=5965395, + scheduled_runs=ScheduledZoneRuns( + current_run=ScheduledZoneRun( + remaining_time=timedelta(seconds=1788), + ), + ), + ), ] @@ -95,13 +116,25 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture async def mock_added_config_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_pydrawise: Mock, + mock_add_config_entry: Callable[[], Awaitable[MockConfigEntry]] ) -> MockConfigEntry: """Mock ConfigEntry that's been added to HA.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert DOMAIN in hass.config_entries.async_domains() - return mock_config_entry + return 
await mock_add_config_entry() + + +@pytest.fixture +async def mock_add_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, +) -> Callable[[], Awaitable[MockConfigEntry]]: + """Callable that creates a mock ConfigEntry that's been added to HA.""" + + async def callback() -> MockConfigEntry: + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert DOMAIN in hass.config_entries.async_domains() + return mock_config_entry + + return callback diff --git a/tests/components/hydrawise/test_binary_sensor.py b/tests/components/hydrawise/test_binary_sensor.py index c60f4392f1e..f4702758136 100644 --- a/tests/components/hydrawise/test_binary_sensor.py +++ b/tests/components/hydrawise/test_binary_sensor.py @@ -1,8 +1,9 @@ """Test Hydrawise binary_sensor.""" from datetime import timedelta -from unittest.mock import Mock +from unittest.mock import AsyncMock +from aiohttp import ClientError from freezegun.api import FrozenDateTimeFactory from homeassistant.components.hydrawise.const import SCAN_INTERVAL @@ -33,12 +34,13 @@ async def test_states( async def test_update_data_fails( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, - mock_pydrawise: Mock, + mock_pydrawise: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test that no data from the API sets the correct connectivity.""" # Make the coordinator refresh data. 
- mock_pydrawise.update_controller_info.return_value = None + mock_pydrawise.get_user.reset_mock(return_value=True) + mock_pydrawise.get_user.side_effect = ClientError freezer.tick(SCAN_INTERVAL + timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index c9efbea507e..17c3eda1699 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -1,9 +1,10 @@ """Test the Hydrawise config flow.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock +from aiohttp import ClientError +from pydrawise.schema import User import pytest -from requests.exceptions import ConnectTimeout, HTTPError from homeassistant import config_entries from homeassistant.components.hydrawise.const import DOMAIN @@ -17,9 +18,11 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -@patch("pydrawise.legacy.LegacyHydrawise") async def test_form( - mock_api: MagicMock, hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_pydrawise: AsyncMock, + user: User, ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -32,19 +35,22 @@ async def test_form( result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "abc123"} ) - mock_api.return_value.customer_id = 12345 + mock_pydrawise.get_user.return_value = user await hass.async_block_till_done() assert result2["type"] == FlowResultType.CREATE_ENTRY assert result2["title"] == "Hydrawise" assert result2["data"] == {"api_key": "abc123"} assert len(mock_setup_entry.mock_calls) == 1 + mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) -@patch("pydrawise.legacy.LegacyHydrawise") -async def test_form_api_error(mock_api: MagicMock, hass: 
HomeAssistant) -> None: +async def test_form_api_error( + hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +) -> None: """Test we handle API errors.""" - mock_api.side_effect = HTTPError + mock_pydrawise.get_user.side_effect = ClientError("XXX") + init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -55,15 +61,17 @@ async def test_form_api_error(mock_api: MagicMock, hass: HomeAssistant) -> None: assert result["type"] == FlowResultType.FORM assert result["errors"] == {"base": "cannot_connect"} - mock_api.side_effect = None + mock_pydrawise.get_user.reset_mock(side_effect=True) + mock_pydrawise.get_user.return_value = user result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] == FlowResultType.CREATE_ENTRY -@patch("pydrawise.legacy.LegacyHydrawise") -async def test_form_connect_timeout(mock_api: MagicMock, hass: HomeAssistant) -> None: +async def test_form_connect_timeout( + hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +) -> None: """Test we handle API errors.""" - mock_api.side_effect = ConnectTimeout + mock_pydrawise.get_user.side_effect = TimeoutError init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -75,15 +83,17 @@ async def test_form_connect_timeout(mock_api: MagicMock, hass: HomeAssistant) -> assert result["type"] == FlowResultType.FORM assert result["errors"] == {"base": "timeout_connect"} - mock_api.side_effect = None + mock_pydrawise.get_user.reset_mock(side_effect=True) + mock_pydrawise.get_user.return_value = user result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] == FlowResultType.CREATE_ENTRY -@patch("pydrawise.legacy.LegacyHydrawise") -async def test_flow_import_success(mock_api: MagicMock, hass: HomeAssistant) -> None: +async def test_flow_import_success( + hass: HomeAssistant, mock_pydrawise: 
AsyncMock, user: User +) -> None: """Test that we can import a YAML config.""" - mock_api.return_value.status = "All good!" + mock_pydrawise.get_user.return_value = User result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, @@ -107,9 +117,11 @@ async def test_flow_import_success(mock_api: MagicMock, hass: HomeAssistant) -> assert issue.translation_key == "deprecated_yaml" -@patch("pydrawise.legacy.LegacyHydrawise", side_effect=HTTPError) -async def test_flow_import_api_error(mock_api: MagicMock, hass: HomeAssistant) -> None: +async def test_flow_import_api_error( + hass: HomeAssistant, mock_pydrawise: AsyncMock +) -> None: """Test that we handle API errors on YAML import.""" + mock_pydrawise.get_user.side_effect = ClientError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, @@ -129,11 +141,11 @@ async def test_flow_import_api_error(mock_api: MagicMock, hass: HomeAssistant) - assert issue.translation_key == "deprecated_yaml_import_issue" -@patch("pydrawise.legacy.LegacyHydrawise", side_effect=ConnectTimeout) async def test_flow_import_connect_timeout( - mock_api: MagicMock, hass: HomeAssistant + hass: HomeAssistant, mock_pydrawise: AsyncMock ) -> None: """Test that we handle connection timeouts on YAML import.""" + mock_pydrawise.get_user.side_effect = TimeoutError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, @@ -153,32 +165,8 @@ async def test_flow_import_connect_timeout( assert issue.translation_key == "deprecated_yaml_import_issue" -@patch("pydrawise.legacy.LegacyHydrawise") -async def test_flow_import_no_status(mock_api: MagicMock, hass: HomeAssistant) -> None: - """Test we handle a lack of API status on YAML import.""" - mock_api.return_value.status = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - 
CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - assert result["type"] == FlowResultType.ABORT - assert result["reason"] == "unknown" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_unknown" - ) - assert issue.translation_key == "deprecated_yaml_import_issue" - - -@patch("pydrawise.legacy.LegacyHydrawise") async def test_flow_import_already_imported( - mock_api: MagicMock, hass: HomeAssistant + hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User ) -> None: """Test that we can handle a YAML config already imported.""" mock_config_entry = MockConfigEntry( @@ -187,12 +175,12 @@ async def test_flow_import_already_imported( data={ CONF_API_KEY: "__api_key__", }, - unique_id="hydrawise-CUSTOMER_ID", + unique_id="hydrawise-12345", ) mock_config_entry.add_to_hass(hass) - mock_api.return_value.customer_id = "CUSTOMER_ID" - mock_api.return_value.status = "All good!" 
+ mock_pydrawise.get_user.return_value = user + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, diff --git a/tests/components/hydrawise/test_init.py b/tests/components/hydrawise/test_init.py index 79cea94d479..6b41867b044 100644 --- a/tests/components/hydrawise/test_init.py +++ b/tests/components/hydrawise/test_init.py @@ -1,8 +1,8 @@ """Tests for the Hydrawise integration.""" -from unittest.mock import Mock +from unittest.mock import AsyncMock -from requests.exceptions import HTTPError +from aiohttp import ClientError from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_ACCESS_TOKEN @@ -13,11 +13,10 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def test_setup_import_success(hass: HomeAssistant, mock_pydrawise: Mock) -> None: +async def test_setup_import_success( + hass: HomeAssistant, mock_pydrawise: AsyncMock +) -> None: """Test that setup with a YAML config triggers an import and warning.""" - mock_pydrawise.update_controller_info.return_value = True - mock_pydrawise.customer_id = 12345 - mock_pydrawise.status = "unknown" config = {"hydrawise": {CONF_ACCESS_TOKEN: "_access-token_"}} assert await async_setup_component(hass, "hydrawise", config) await hass.async_block_till_done() @@ -30,21 +29,10 @@ async def test_setup_import_success(hass: HomeAssistant, mock_pydrawise: Mock) - async def test_connect_retry( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_pydrawise: Mock + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_pydrawise: AsyncMock ) -> None: """Test that a connection error triggers a retry.""" - mock_pydrawise.update_controller_info.side_effect = HTTPError - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - 
-async def test_setup_no_data( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_pydrawise: Mock -) -> None: - """Test that no data from the API triggers a retry.""" - mock_pydrawise.update_controller_info.return_value = False + mock_pydrawise.get_user.side_effect = ClientError mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/hydrawise/test_sensor.py b/tests/components/hydrawise/test_sensor.py index c6d3fecab65..f0edb79b349 100644 --- a/tests/components/hydrawise/test_sensor.py +++ b/tests/components/hydrawise/test_sensor.py @@ -1,6 +1,9 @@ """Test Hydrawise sensor.""" +from collections.abc import Awaitable, Callable + from freezegun.api import FrozenDateTimeFactory +from pydrawise.schema import Zone import pytest from homeassistant.core import HomeAssistant @@ -26,3 +29,18 @@ async def test_states( next_cycle = hass.states.get("sensor.zone_one_next_cycle") assert next_cycle is not None assert next_cycle.state == "2023-10-04T19:49:57+00:00" + + +@pytest.mark.freeze_time("2023-10-01 00:00:00+00:00") +async def test_suspended_state( + hass: HomeAssistant, + zones: list[Zone], + mock_add_config_entry: Callable[[], Awaitable[MockConfigEntry]], +) -> None: + """Test sensor states.""" + zones[0].scheduled_runs.next_run = None + await mock_add_config_entry() + + next_cycle = hass.states.get("sensor.zone_one_next_cycle") + assert next_cycle is not None + assert next_cycle.state == "9999-12-31T23:59:59+00:00" diff --git a/tests/components/hydrawise/test_switch.py b/tests/components/hydrawise/test_switch.py index 1d2de7f8332..30a58735122 100644 --- a/tests/components/hydrawise/test_switch.py +++ b/tests/components/hydrawise/test_switch.py @@ -1,12 +1,16 @@ """Test Hydrawise switch.""" -from unittest.mock import Mock +from datetime import timedelta +from unittest.mock import AsyncMock -from freezegun.api import FrozenDateTimeFactory +from 
pydrawise.schema import Zone +import pytest +from homeassistant.components.hydrawise.const import DEFAULT_WATERING_TIME from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry @@ -14,7 +18,6 @@ from tests.common import MockConfigEntry async def test_states( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, ) -> None: """Test switch states.""" watering1 = hass.states.get("switch.zone_one_manual_watering") @@ -31,11 +34,14 @@ async def test_states( auto_watering2 = hass.states.get("switch.zone_two_automatic_watering") assert auto_watering2 is not None - assert auto_watering2.state == "off" + assert auto_watering2.state == "on" async def test_manual_watering_services( - hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, mock_pydrawise: Mock + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], ) -> None: """Test Manual Watering services.""" await hass.services.async_call( @@ -44,7 +50,9 @@ async def test_manual_watering_services( service_data={ATTR_ENTITY_ID: "switch.zone_one_manual_watering"}, blocking=True, ) - mock_pydrawise.run_zone.assert_called_once_with(15, 1) + mock_pydrawise.start_zone.assert_called_once_with( + zones[0], custom_run_duration=DEFAULT_WATERING_TIME + ) state = hass.states.get("switch.zone_one_manual_watering") assert state is not None assert state.state == "on" @@ -56,14 +64,18 @@ async def test_manual_watering_services( service_data={ATTR_ENTITY_ID: "switch.zone_one_manual_watering"}, blocking=True, ) - mock_pydrawise.run_zone.assert_called_once_with(0, 1) + mock_pydrawise.stop_zone.assert_called_once_with(zones[0]) state = hass.states.get("switch.zone_one_manual_watering") assert state is not None 
assert state.state == "off" +@pytest.mark.freeze_time("2023-10-01 00:00:00+00:00") async def test_auto_watering_services( - hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, mock_pydrawise: Mock + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], ) -> None: """Test Automatic Watering services.""" await hass.services.async_call( @@ -72,7 +84,9 @@ async def test_auto_watering_services( service_data={ATTR_ENTITY_ID: "switch.zone_one_automatic_watering"}, blocking=True, ) - mock_pydrawise.suspend_zone.assert_called_once_with(365, 1) + mock_pydrawise.suspend_zone.assert_called_once_with( + zones[0], dt_util.now() + timedelta(days=365) + ) state = hass.states.get("switch.zone_one_automatic_watering") assert state is not None assert state.state == "off" @@ -84,7 +98,7 @@ async def test_auto_watering_services( service_data={ATTR_ENTITY_ID: "switch.zone_one_automatic_watering"}, blocking=True, ) - mock_pydrawise.suspend_zone.assert_called_once_with(0, 1) + mock_pydrawise.resume_zone.assert_called_once_with(zones[0]) state = hass.states.get("switch.zone_one_automatic_watering") assert state is not None assert state.state == "on" diff --git a/tests/components/iaqualink/test_init.py b/tests/components/iaqualink/test_init.py index 7b61b42c9d2..646e9e4da86 100644 --- a/tests/components/iaqualink/test_init.py +++ b/tests/components/iaqualink/test_init.py @@ -114,7 +114,8 @@ async def test_setup_devices_exception( "homeassistant.components.iaqualink.AqualinkClient.get_systems", return_value=systems, ), patch.object( - system, "get_devices" + system, + "get_devices", ) as mock_get_devices: mock_get_devices.side_effect = AqualinkServiceException await hass.config_entries.async_setup(config_entry.entry_id) @@ -142,7 +143,8 @@ async def test_setup_all_good_no_recognized_devices( "homeassistant.components.iaqualink.AqualinkClient.get_systems", return_value=systems, ), patch.object( - system, "get_devices" + 
system, + "get_devices", ) as mock_get_devices: mock_get_devices.return_value = devices await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index d6c2ba5ad6b..8159039aff4 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -55,6 +55,7 @@ def mock_desk_api(): mock_desk.move_up = AsyncMock(side_effect=mock_move_up) mock_desk.move_down = AsyncMock(side_effect=mock_move_down) mock_desk.stop = AsyncMock() + mock_desk.height = 1 mock_desk.height_percent = 60 mock_desk.is_moving = False mock_desk.address = "AA:BB:CC:DD:EE:FF" diff --git a/tests/components/idasen_desk/test_cover.py b/tests/components/idasen_desk/test_cover.py index a9c74be7081..4c8bf7806e0 100644 --- a/tests/components/idasen_desk/test_cover.py +++ b/tests/components/idasen_desk/test_cover.py @@ -2,6 +2,7 @@ from typing import Any from unittest.mock import MagicMock +from bleak.exc import BleakError import pytest from homeassistant.components.cover import ( @@ -19,6 +20,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from . 
import init_integration @@ -80,3 +82,34 @@ async def test_cover_services( assert state assert state.state == expected_state assert state.attributes[ATTR_CURRENT_POSITION] == expected_position + + +@pytest.mark.parametrize( + ("service", "service_data", "mock_method_name"), + [ + (SERVICE_SET_COVER_POSITION, {ATTR_POSITION: 100}, "move_to"), + (SERVICE_OPEN_COVER, {}, "move_up"), + (SERVICE_CLOSE_COVER, {}, "move_down"), + (SERVICE_STOP_COVER, {}, "stop"), + ], +) +async def test_cover_services_exception( + hass: HomeAssistant, + mock_desk_api: MagicMock, + service: str, + service_data: dict[str, Any], + mock_method_name: str, +) -> None: + """Test cover services exception handling.""" + entity_id = "cover.test" + await init_integration(hass) + fail_call = getattr(mock_desk_api, mock_method_name) + fail_call.side_effect = BleakError() + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + COVER_DOMAIN, + service, + {"entity_id": entity_id, **service_data}, + blocking=True, + ) + await hass.async_block_till_done() diff --git a/tests/components/idasen_desk/test_sensors.py b/tests/components/idasen_desk/test_sensors.py new file mode 100644 index 00000000000..23d7ac2447b --- /dev/null +++ b/tests/components/idasen_desk/test_sensors.py @@ -0,0 +1,27 @@ +"""Test the IKEA Idasen Desk sensors.""" +from unittest.mock import MagicMock + +from homeassistant.core import HomeAssistant + +from . 
import init_integration + + +async def test_height_sensor( + hass: HomeAssistant, + mock_desk_api: MagicMock, + entity_registry_enabled_by_default: None, +) -> None: + """Test height sensor.""" + await init_integration(hass) + + entity_id = "sensor.test_height" + state = hass.states.get(entity_id) + assert state + assert state.state == "1" + + mock_desk_api.height = 1.2 + mock_desk_api.trigger_update_callback(None) + + state = hass.states.get(entity_id) + assert state + assert state.state == "1.2" diff --git a/tests/components/imap/const.py b/tests/components/imap/const.py index ec864fd4665..713261936c7 100644 --- a/tests/components/imap/const.py +++ b/tests/components/imap/const.py @@ -18,16 +18,25 @@ TEST_MESSAGE_HEADERS1 = ( b"for ; Fri, 24 Mar 2023 13:52:01 +0100 (CET)\r\n" ) TEST_MESSAGE_HEADERS2 = ( - b"MIME-Version: 1.0\r\n" b"To: notify@example.com\r\n" b"From: John Doe \r\n" b"Subject: Test subject\r\n" - b"Message-ID: " + b"Message-ID: \r\n" + b"MIME-Version: 1.0\r\n" +) + +TEST_MULTIPART_HEADER = ( + b'Content-Type: multipart/related;\r\n\tboundary="Mark=_100584970350292485166"' ) TEST_MESSAGE_HEADERS3 = b"" TEST_MESSAGE = TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS2 + +TEST_MESSAGE_MULTIPART = ( + TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS2 + TEST_MULTIPART_HEADER +) + TEST_MESSAGE_NO_SUBJECT_TO_FROM = ( TEST_MESSAGE_HEADERS1 + DATE_HEADER1 + TEST_MESSAGE_HEADERS3 ) @@ -44,21 +53,27 @@ TEST_INVALID_DATE3 = ( TEST_CONTENT_TEXT_BARE = b"\r\nTest body\r\n\r\n" -TEST_CONTENT_BINARY = ( - b"Content-Type: application/binary\r\n" - b"Content-Transfer-Encoding: base64\r\n" - b"\r\n" - b"VGVzdCBib2R5\r\n" -) +TEST_CONTENT_BINARY = b"Content-Type: application/binary\r\n\r\nTest body\r\n" TEST_CONTENT_TEXT_PLAIN = ( - b"Content-Type: text/plain; charset=UTF-8; format=flowed\r\n" - b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n\r\n" + b'Content-Type: text/plain; charset="utf-8"\r\n' + b"Content-Transfer-Encoding: 
7bit\r\n\r\nTest body\r\n" ) +TEST_CONTENT_TEXT_BASE64 = ( + b'Content-Type: text/plain; charset="utf-8"\r\n' + b"Content-Transfer-Encoding: base64\r\n\r\nVGVzdCBib2R5\r\n" +) + +TEST_CONTENT_TEXT_BASE64_INVALID = ( + b'Content-Type: text/plain; charset="utf-8"\r\n' + b"Content-Transfer-Encoding: base64\r\n\r\nVGVzdCBib2R5invalid\r\n" +) +TEST_BADLY_ENCODED_CONTENT = "VGVzdCBib2R5invalid\r\n" + TEST_CONTENT_TEXT_OTHER = ( b"Content-Type: text/other; charset=UTF-8\r\n" - b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n\r\n" + b"Content-Transfer-Encoding: 7bit\r\n\r\nTest body\r\n" ) TEST_CONTENT_HTML = ( @@ -76,14 +91,40 @@ TEST_CONTENT_HTML = ( b"\r\n" b"\r\n" ) +TEST_CONTENT_HTML_BASE64 = ( + b"Content-Type: text/html; charset=UTF-8\r\n" + b"Content-Transfer-Encoding: base64\r\n\r\n" + b"PGh0bWw+CiAgICA8aGVhZD48bWV0YSBodHRwLWVxdW" + b"l2PSJjb250ZW50LXR5cGUiIGNvbnRlbnQ9InRleHQvaHRtbDsgY2hhcnNldD1VVEYtOCI+PC9oZWFkPgog" + b"CAgPGJvZHk+CiAgICAgIDxwPlRlc3QgYm9keTxicj48L3A+CiAgICA8L2JvZHk+CjwvaHRtbD4=\r\n" +) + TEST_CONTENT_MULTIPART = ( b"\r\nThis is a multi-part message in MIME format.\r\n" - + b"--------------McwBciN2C0o3rWeF1tmFo2oI\r\n" + + b"\r\n--Mark=_100584970350292485166\r\n" + TEST_CONTENT_TEXT_PLAIN - + b"--------------McwBciN2C0o3rWeF1tmFo2oI\r\n" + + b"\r\n--Mark=_100584970350292485166\r\n" + TEST_CONTENT_HTML - + b"--------------McwBciN2C0o3rWeF1tmFo2oI--\r\n" + + b"\r\n--Mark=_100584970350292485166--\r\n" +) + +TEST_CONTENT_MULTIPART_BASE64 = ( + b"\r\nThis is a multi-part message in MIME format.\r\n" + + b"\r\n--Mark=_100584970350292485166\r\n" + + TEST_CONTENT_TEXT_BASE64 + + b"\r\n--Mark=_100584970350292485166\r\n" + + TEST_CONTENT_HTML_BASE64 + + b"\r\n--Mark=_100584970350292485166--\r\n" +) + +TEST_CONTENT_MULTIPART_BASE64_INVALID = ( + b"\r\nThis is a multi-part message in MIME format.\r\n" + + b"\r\n--Mark=_100584970350292485166\r\n" + + TEST_CONTENT_TEXT_BASE64_INVALID + + b"\r\n--Mark=_100584970350292485166\r\n" + + 
TEST_CONTENT_HTML_BASE64 + + b"\r\n--Mark=_100584970350292485166--\r\n" ) EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."]) @@ -202,14 +243,40 @@ TEST_FETCH_RESPONSE_MULTIPART = ( "OK", [ b"1 FETCH (BODY[] {" - + str(len(TEST_MESSAGE + TEST_CONTENT_MULTIPART)).encode("utf-8") + + str(len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART)).encode("utf-8") + b"}", - bytearray(TEST_MESSAGE + TEST_CONTENT_MULTIPART), + bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART), + b")", + b"Fetch completed (0.0001 + 0.000 secs).", + ], +) +TEST_FETCH_RESPONSE_MULTIPART_BASE64 = ( + "OK", + [ + b"1 FETCH (BODY[] {" + + str(len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64)).encode( + "utf-8" + ) + + b"}", + bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64), b")", b"Fetch completed (0.0001 + 0.000 secs).", ], ) +TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID = ( + "OK", + [ + b"1 FETCH (BODY[] {" + + str( + len(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64_INVALID) + ).encode("utf-8") + + b"}", + bytearray(TEST_MESSAGE_MULTIPART + TEST_CONTENT_MULTIPART_BASE64_INVALID), + b")", + b"Fetch completed (0.0001 + 0.000 secs).", + ], +) TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM = ( "OK", diff --git a/tests/components/imap/test_init.py b/tests/components/imap/test_init.py index ceda841202c..a00f9d9c25d 100644 --- a/tests/components/imap/test_init.py +++ b/tests/components/imap/test_init.py @@ -17,12 +17,15 @@ from homeassistant.util.dt import utcnow from .const import ( BAD_RESPONSE, EMPTY_SEARCH_RESPONSE, + TEST_BADLY_ENCODED_CONTENT, TEST_FETCH_RESPONSE_BINARY, TEST_FETCH_RESPONSE_HTML, TEST_FETCH_RESPONSE_INVALID_DATE1, TEST_FETCH_RESPONSE_INVALID_DATE2, TEST_FETCH_RESPONSE_INVALID_DATE3, TEST_FETCH_RESPONSE_MULTIPART, + TEST_FETCH_RESPONSE_MULTIPART_BASE64, + TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID, TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM, TEST_FETCH_RESPONSE_TEXT_BARE, TEST_FETCH_RESPONSE_TEXT_OTHER, @@ -110,6 
+113,7 @@ async def test_entry_startup_fails( (TEST_FETCH_RESPONSE_TEXT_OTHER, True), (TEST_FETCH_RESPONSE_HTML, True), (TEST_FETCH_RESPONSE_MULTIPART, True), + (TEST_FETCH_RESPONSE_MULTIPART_BASE64, True), (TEST_FETCH_RESPONSE_BINARY, True), ], ids=[ @@ -122,6 +126,7 @@ async def test_entry_startup_fails( "other", "html", "multipart", + "multipart_base64", "binary", ], ) @@ -154,7 +159,7 @@ async def test_receiving_message_successfully( assert data["folder"] == "INBOX" assert data["sender"] == "john.doe@example.com" assert data["subject"] == "Test subject" - assert data["text"] + assert "Test body" in data["text"] assert ( valid_date and isinstance(data["date"], datetime) @@ -163,6 +168,48 @@ async def test_receiving_message_successfully( ) +@pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE]) +@pytest.mark.parametrize( + ("imap_fetch"), + [ + TEST_FETCH_RESPONSE_MULTIPART_BASE64_INVALID, + ], + ids=[ + "multipart_base64_invalid", + ], +) +@pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"]) +async def test_receiving_message_with_invalid_encoding( + hass: HomeAssistant, mock_imap_protocol: MagicMock +) -> None: + """Test receiving a message successfully.""" + event_called = async_capture_events(hass, "imap_content") + + config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + # Make sure we have had one update (when polling) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + state = hass.states.get("sensor.imap_email_email_com") + # we should have received one message + assert state is not None + assert state.state == "1" + assert state.attributes["state_class"] == SensorStateClass.MEASUREMENT + + # we should have received one event + assert len(event_called) == 1 + data: dict[str, Any] = event_called[0].data + assert 
data["server"] == "imap.server.com" + assert data["username"] == "email@email.com" + assert data["search"] == "UnSeen UnDeleted" + assert data["folder"] == "INBOX" + assert data["sender"] == "john.doe@example.com" + assert data["subject"] == "Test subject" + assert data["text"] == TEST_BADLY_ENCODED_CONTENT + + @pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE]) @pytest.mark.parametrize("imap_fetch", [TEST_FETCH_RESPONSE_NO_SUBJECT_TO_FROM]) @pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"]) @@ -196,7 +243,7 @@ async def test_receiving_message_no_subject_to_from( assert data["date"] == datetime( 2023, 3, 24, 13, 52, tzinfo=timezone(timedelta(seconds=3600)) ) - assert data["text"] == "Test body\r\n\r\n" + assert data["text"] == "Test body\r\n" assert data["headers"]["Return-Path"] == ("",) assert data["headers"]["Delivered-To"] == ("notify@example.com",) diff --git a/tests/components/input_select/test_init.py b/tests/components/input_select/test_init.py index 03c503ae494..3978d0cf175 100644 --- a/tests/components/input_select/test_init.py +++ b/tests/components/input_select/test_init.py @@ -740,7 +740,7 @@ async def test_update_duplicates( ) resp = await client.receive_json() assert not resp["success"] - assert resp["error"]["code"] == "unknown_error" + assert resp["error"]["code"] == "home_assistant_error" assert resp["error"]["message"] == "Duplicate options are not allowed" state = hass.states.get(input_entity_id) @@ -812,7 +812,7 @@ async def test_ws_create_duplicates( ) resp = await client.receive_json() assert not resp["success"] - assert resp["error"]["code"] == "unknown_error" + assert resp["error"]["code"] == "home_assistant_error" assert resp["error"]["message"] == "Duplicate options are not allowed" assert not hass.states.get(input_entity_id) diff --git a/tests/components/insteon/const.py b/tests/components/insteon/const.py index e731c51d6c6..53db12acb04 100644 --- a/tests/components/insteon/const.py +++ 
b/tests/components/insteon/const.py @@ -38,6 +38,10 @@ MOCK_USER_INPUT_PLM = { CONF_DEVICE: MOCK_DEVICE, } +MOCK_USER_INPUT_PLM_MANUAL = { + CONF_DEVICE: "manual", +} + MOCK_USER_INPUT_HUB_V2 = { CONF_HOST: MOCK_HOSTNAME, CONF_USERNAME: MOCK_USERNAME, diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index e15b7b2a287..106c93071be 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -1,5 +1,4 @@ """Test the config flow for the Insteon integration.""" - from unittest.mock import patch import pytest @@ -15,6 +14,7 @@ from homeassistant.components.insteon.config_flow import ( STEP_HUB_V1, STEP_HUB_V2, STEP_PLM, + STEP_PLM_MANUALLY, STEP_REMOVE_OVERRIDE, STEP_REMOVE_X10, ) @@ -45,6 +45,7 @@ from .const import ( MOCK_USER_INPUT_HUB_V1, MOCK_USER_INPUT_HUB_V2, MOCK_USER_INPUT_PLM, + MOCK_USER_INPUT_PLM_MANUAL, PATCH_ASYNC_SETUP, PATCH_ASYNC_SETUP_ENTRY, PATCH_CONNECTION, @@ -155,6 +156,41 @@ async def test_form_select_plm(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +async def test_form_select_plm_no_usb(hass: HomeAssistant) -> None: + """Test we set up the PLM when no comm ports are found.""" + + temp_usb_list = dict(USB_PORTS) + USB_PORTS.clear() + result = await _init_form(hass, STEP_PLM) + + result2, _, _ = await _device_form( + hass, result["flow_id"], mock_successful_connection, None + ) + USB_PORTS.update(temp_usb_list) + assert result2["type"] == "form" + assert result2["step_id"] == STEP_PLM_MANUALLY + + +async def test_form_select_plm_manual(hass: HomeAssistant) -> None: + """Test we set up the PLM correctly.""" + + result = await _init_form(hass, STEP_PLM) + + result2, mock_setup, mock_setup_entry = await _device_form( + hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM_MANUAL + ) + + result3, mock_setup, mock_setup_entry = await _device_form( + hass, result2["flow_id"], mock_successful_connection, 
MOCK_USER_INPUT_PLM + ) + assert result2["type"] == "form" + assert result3["type"] == "create_entry" + assert result3["data"] == MOCK_USER_INPUT_PLM + + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_form_select_hub_v1(hass: HomeAssistant) -> None: """Test we set up the Hub v1 correctly.""" @@ -225,6 +261,21 @@ async def test_failed_connection_plm(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "cannot_connect"} +async def test_failed_connection_plm_manually(hass: HomeAssistant) -> None: + """Test a failed connection with the PLM.""" + + result = await _init_form(hass, STEP_PLM) + + result2, _, _ = await _device_form( + hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM_MANUAL + ) + result3, _, _ = await _device_form( + hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM + ) + assert result3["type"] == "form" + assert result3["errors"] == {"base": "cannot_connect"} + + async def test_failed_connection_hub(hass: HomeAssistant) -> None: """Test a failed connection with a Hub.""" diff --git a/tests/components/insteon/test_init.py b/tests/components/insteon/test_init.py index 15f529babd8..f772eed2d26 100644 --- a/tests/components/insteon/test_init.py +++ b/tests/components/insteon/test_init.py @@ -76,7 +76,8 @@ async def test_import_frontend_dev_url(hass: HomeAssistant) -> None: ), patch.object(insteon, "close_insteon_connection"), patch.object( insteon, "devices", new=MockDevices() ), patch( - PATCH_CONNECTION, new=mock_successful_connection + PATCH_CONNECTION, + new=mock_successful_connection, ): assert await async_setup_component( hass, diff --git a/tests/components/insteon/test_lock.py b/tests/components/insteon/test_lock.py index f96e33af1c8..c100acae3ce 100644 --- a/tests/components/insteon/test_lock.py +++ b/tests/components/insteon/test_lock.py @@ -47,7 +47,9 @@ def patch_setup_and_devices(): ), patch.object(insteon, "devices", devices), 
patch.object( insteon_utils, "devices", devices ), patch.object( - insteon_entity, "devices", devices + insteon_entity, + "devices", + devices, ): yield diff --git a/tests/components/ipma/snapshots/test_weather.ambr b/tests/components/ipma/snapshots/test_weather.ambr index 92e1d1a91b5..0a778776329 100644 --- a/tests/components/ipma/snapshots/test_weather.ambr +++ b/tests/components/ipma/snapshots/test_weather.ambr @@ -36,6 +36,125 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': '100.0', + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ + 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': '100.0', + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ 
+ 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 16, 0, 0), + 'precipitation_probability': '100.0', + 'temperature': 16.2, + 'templow': 10.6, + 'wind_bearing': 'S', + 'wind_speed': 10.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].1 + dict({ + 'weather.hometown': dict({ + 'forecast': list([ + dict({ + 'condition': 'rainy', + 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + dict({ + 'condition': 'clear-night', + 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), + 'precipitation_probability': 80.0, + 'temperature': 12.0, + 'wind_bearing': 'S', + 'wind_speed': 32.7, + }), + ]), + }), + }) +# --- # name: test_forecast_subscription[daily] list([ dict({ diff --git a/tests/components/ipma/test_weather.py b/tests/components/ipma/test_weather.py index 71884e0c82e..9e0262733a3 100644 --- a/tests/components/ipma/test_weather.py +++ b/tests/components/ipma/test_weather.py @@ -22,7 +22,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_BEARING, ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -152,9 +153,17 @@ async def test_failed_get_observation_forecast(hass: HomeAssistant) -> None: assert state.attributes.get("friendly_name") == "HomeTown" +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + 
LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test multiple forecast.""" @@ -169,7 +178,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.hometown", "type": "daily", @@ -181,7 +190,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.hometown", "type": "hourly", diff --git a/tests/components/iqvia/conftest.py b/tests/components/iqvia/conftest.py index 075d7249d36..b24d473c7df 100644 --- a/tests/components/iqvia/conftest.py +++ b/tests/components/iqvia/conftest.py @@ -94,13 +94,9 @@ async def setup_iqvia_fixture( "pyiqvia.allergens.Allergens.outlook", return_value=data_allergy_outlook ), patch( "pyiqvia.asthma.Asthma.extended", return_value=data_asthma_forecast - ), patch( - "pyiqvia.asthma.Asthma.current", return_value=data_asthma_index - ), patch( + ), patch("pyiqvia.asthma.Asthma.current", return_value=data_asthma_index), patch( "pyiqvia.disease.Disease.extended", return_value=data_disease_forecast - ), patch( - "pyiqvia.disease.Disease.current", return_value=data_disease_index - ), patch( + ), patch("pyiqvia.disease.Disease.current", return_value=data_disease_index), patch( "homeassistant.components.iqvia.PLATFORMS", [] ): assert await async_setup_component(hass, DOMAIN, config) diff --git a/tests/components/islamic_prayer_times/__init__.py b/tests/components/islamic_prayer_times/__init__.py index b93c46108d8..8750461c47f 100644 --- a/tests/components/islamic_prayer_times/__init__.py +++ b/tests/components/islamic_prayer_times/__init__.py @@ -5,43 +5,23 @@ from datetime import datetime import homeassistant.util.dt as dt_util PRAYER_TIMES = { - "Fajr": "06:10", - "Sunrise": "07:25", - "Dhuhr": "12:30", - "Asr": "15:32", - "Maghrib": "17:35", - "Isha": 
"18:53", - "Midnight": "00:45", -} - -PRAYER_TIMES_TIMESTAMPS = { - "Fajr": datetime(2020, 1, 1, 6, 10, 0, tzinfo=dt_util.UTC), - "Sunrise": datetime(2020, 1, 1, 7, 25, 0, tzinfo=dt_util.UTC), - "Dhuhr": datetime(2020, 1, 1, 12, 30, 0, tzinfo=dt_util.UTC), - "Asr": datetime(2020, 1, 1, 15, 32, 0, tzinfo=dt_util.UTC), - "Maghrib": datetime(2020, 1, 1, 17, 35, 0, tzinfo=dt_util.UTC), - "Isha": datetime(2020, 1, 1, 18, 53, 0, tzinfo=dt_util.UTC), - "Midnight": datetime(2020, 1, 1, 00, 45, 0, tzinfo=dt_util.UTC), + "Fajr": "2020-01-01T06:10:00+00:00", + "Sunrise": "2020-01-01T07:25:00+00:00", + "Dhuhr": "2020-01-01T12:30:00+00:00", + "Asr": "2020-01-01T15:32:00+00:00", + "Maghrib": "2020-01-01T17:35:00+00:00", + "Isha": "2020-01-01T18:53:00+00:00", + "Midnight": "2020-01-01T00:45:00+00:00", } NEW_PRAYER_TIMES = { - "Fajr": "06:00", - "Sunrise": "07:25", - "Dhuhr": "12:30", - "Asr": "15:32", - "Maghrib": "17:45", - "Isha": "18:53", - "Midnight": "00:43", -} - -NEW_PRAYER_TIMES_TIMESTAMPS = { - "Fajr": datetime(2020, 1, 2, 6, 00, 0, tzinfo=dt_util.UTC), - "Sunrise": datetime(2020, 1, 2, 7, 25, 0, tzinfo=dt_util.UTC), - "Dhuhr": datetime(2020, 1, 2, 12, 30, 0, tzinfo=dt_util.UTC), - "Asr": datetime(2020, 1, 2, 15, 32, 0, tzinfo=dt_util.UTC), - "Maghrib": datetime(2020, 1, 2, 17, 45, 0, tzinfo=dt_util.UTC), - "Isha": datetime(2020, 1, 2, 18, 53, 0, tzinfo=dt_util.UTC), - "Midnight": datetime(2020, 1, 2, 00, 43, 0, tzinfo=dt_util.UTC), + "Fajr": "2020-01-02T06:00:00+00:00", + "Sunrise": "2020-01-02T07:25:00+00:00", + "Dhuhr": "2020-01-02T12:30:00+00:00", + "Asr": "2020-01-02T15:32:00+00:00", + "Maghrib": "2020-01-02T17:45:00+00:00", + "Isha": "2020-01-02T18:53:00+00:00", + "Midnight": "2020-01-02T00:43:00+00:00", } NOW = datetime(2020, 1, 1, 00, 00, 0, tzinfo=dt_util.UTC) diff --git a/tests/components/islamic_prayer_times/test_init.py b/tests/components/islamic_prayer_times/test_init.py index 0a41630e29b..0c3f19e43fe 100644 --- 
a/tests/components/islamic_prayer_times/test_init.py +++ b/tests/components/islamic_prayer_times/test_init.py @@ -13,8 +13,9 @@ from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util -from . import NEW_PRAYER_TIMES, NOW, PRAYER_TIMES, PRAYER_TIMES_TIMESTAMPS +from . import NEW_PRAYER_TIMES, NOW, PRAYER_TIMES from tests.common import MockConfigEntry, async_fire_time_changed @@ -90,7 +91,7 @@ async def test_options_listener(hass: HomeAssistant) -> None: with patch( "prayer_times_calculator.PrayerTimesCalculator.fetch_prayer_times", return_value=PRAYER_TIMES, - ) as mock_fetch_prayer_times: + ) as mock_fetch_prayer_times, freeze_time(NOW): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert mock_fetch_prayer_times.call_count == 1 @@ -123,7 +124,9 @@ async def test_update_failed(hass: HomeAssistant) -> None: InvalidResponseError, NEW_PRAYER_TIMES, ] - future = PRAYER_TIMES_TIMESTAMPS["Midnight"] + timedelta(days=1, minutes=1) + midnight_time = dt_util.parse_datetime(PRAYER_TIMES["Midnight"]) + assert midnight_time + future = midnight_time + timedelta(days=1, minutes=1) with freeze_time(future): async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/islamic_prayer_times/test_sensor.py b/tests/components/islamic_prayer_times/test_sensor.py index e7f3759f993..164ac8818fe 100644 --- a/tests/components/islamic_prayer_times/test_sensor.py +++ b/tests/components/islamic_prayer_times/test_sensor.py @@ -6,9 +6,8 @@ import pytest from homeassistant.components.islamic_prayer_times.const import DOMAIN from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util -from . import NOW, PRAYER_TIMES, PRAYER_TIMES_TIMESTAMPS +from . 
import NOW, PRAYER_TIMES from tests.common import MockConfigEntry @@ -44,7 +43,4 @@ async def test_islamic_prayer_times_sensors( ), freeze_time(NOW): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert ( - hass.states.get(sensor_name).state - == PRAYER_TIMES_TIMESTAMPS[key].astimezone(dt_util.UTC).isoformat() - ) + assert hass.states.get(sensor_name).state == PRAYER_TIMES[key] diff --git a/tests/components/kaleidescape/test_init.py b/tests/components/kaleidescape/test_init.py index d0826f4714a..28d90290996 100644 --- a/tests/components/kaleidescape/test_init.py +++ b/tests/components/kaleidescape/test_init.py @@ -47,11 +47,11 @@ async def test_config_entry_not_ready( async def test_device( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mock_device: AsyncMock, mock_integration: MockConfigEntry, ) -> None: """Test device.""" - device_registry = dr.async_get(hass) device = device_registry.async_get_device( identifiers={("kaleidescape", MOCK_SERIAL)} ) diff --git a/tests/components/kaleidescape/test_media_player.py b/tests/components/kaleidescape/test_media_player.py index f38c61d3e73..ad7dcbcaa51 100644 --- a/tests/components/kaleidescape/test_media_player.py +++ b/tests/components/kaleidescape/test_media_player.py @@ -170,11 +170,11 @@ async def test_services( async def test_device( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mock_device: MagicMock, mock_integration: MockConfigEntry, ) -> None: """Test device attributes.""" - device_registry = dr.async_get(hass) device = device_registry.async_get_device( identifiers={("kaleidescape", MOCK_SERIAL)} ) diff --git a/tests/components/kaleidescape/test_sensor.py b/tests/components/kaleidescape/test_sensor.py index 3fbff29e3e9..70406872464 100644 --- a/tests/components/kaleidescape/test_sensor.py +++ b/tests/components/kaleidescape/test_sensor.py @@ -18,12 +18,13 @@ FRIENDLY_NAME = f"Kaleidescape Device {MOCK_SERIAL}" async def test_sensors( hass: 
HomeAssistant, + entity_registry: er.EntityRegistry, mock_device: MagicMock, mock_integration: MockConfigEntry, ) -> None: """Test sensors.""" entity = hass.states.get(f"{ENTITY_ID}_media_location") - entry = er.async_get(hass).async_get(f"{ENTITY_ID}_media_location") + entry = entity_registry.async_get(f"{ENTITY_ID}_media_location") assert entity assert entity.state == "none" assert ( @@ -33,7 +34,7 @@ async def test_sensors( assert entry.unique_id == f"{MOCK_SERIAL}-media_location" entity = hass.states.get(f"{ENTITY_ID}_play_status") - entry = er.async_get(hass).async_get(f"{ENTITY_ID}_play_status") + entry = entity_registry.async_get(f"{ENTITY_ID}_play_status") assert entity assert entity.state == "none" assert entity.attributes.get(ATTR_FRIENDLY_NAME) == f"{FRIENDLY_NAME} Play status" diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index 47715433a52..aace7a0224c 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -24,7 +24,7 @@ from tests.common import ( async def test_binary_sensor_entity_category( - hass: HomeAssistant, knx: KNXTestKit + hass: HomeAssistant, entity_registry: er.EntityRegistry, knx: KNXTestKit ) -> None: """Test KNX binary sensor entity category.""" await knx.setup_integration( @@ -42,8 +42,7 @@ async def test_binary_sensor_entity_category( await knx.assert_read("1/1/1") await knx.receive_response("1/1/1", True) - registry = er.async_get(hass) - entity = registry.async_get("binary_sensor.test_normal") + entity = entity_registry.async_get("binary_sensor.test_normal") assert entity.entity_category is EntityCategory.DIAGNOSTIC diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 3e8519feb98..a905e66fe5d 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -130,9 +130,9 @@ async def test_button_invalid( assert len(caplog.messages) == 2 record = 
caplog.records[0] assert record.levelname == "ERROR" - assert f"Invalid config for [knx]: {error_msg}" in record.message + assert f"Invalid config for 'knx': {error_msg}" in record.message record = caplog.records[1] assert record.levelname == "ERROR" - assert "Setup failed for knx: Invalid config." in record.message + assert "Setup failed for 'knx': Invalid config." in record.message assert hass.states.get("button.test") is None assert hass.data.get(DOMAIN) is None diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 5d42ed79542..0f2d8e56050 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -77,9 +77,9 @@ def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None): return_value=return_value, side_effect=side_effect, ), patch( - "pathlib.Path.mkdir" + "pathlib.Path.mkdir", ) as mkdir_mock, patch( - "shutil.move" + "shutil.move", ) as shutil_move_mock: file_upload_mock.return_value.__enter__.return_value = Mock() yield return_value diff --git a/tests/components/komfovent/__init__.py b/tests/components/komfovent/__init__.py deleted file mode 100644 index e5492a52327..00000000000 --- a/tests/components/komfovent/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Komfovent integration.""" diff --git a/tests/components/komfovent/conftest.py b/tests/components/komfovent/conftest.py deleted file mode 100644 index d9cb0950c74..00000000000 --- a/tests/components/komfovent/conftest.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Common fixtures for the Komfovent tests.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -import pytest - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.komfovent.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry diff --git 
a/tests/components/komfovent/test_config_flow.py b/tests/components/komfovent/test_config_flow.py deleted file mode 100644 index 008d92e36a3..00000000000 --- a/tests/components/komfovent/test_config_flow.py +++ /dev/null @@ -1,189 +0,0 @@ -"""Test the Komfovent config flow.""" -from unittest.mock import AsyncMock, patch - -import komfovent_api -import pytest - -from homeassistant import config_entries -from homeassistant.components.komfovent.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResult, FlowResultType - -from tests.common import MockConfigEntry - - -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: - """Test flow completes as expected.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - - final_result = await __test_normal_flow(hass, result["flow_id"]) - assert final_result["type"] == FlowResultType.CREATE_ENTRY - assert final_result["title"] == "test-name" - assert final_result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("error", "expected_response"), - [ - (komfovent_api.KomfoventConnectionResult.NOT_FOUND, "cannot_connect"), - (komfovent_api.KomfoventConnectionResult.UNAUTHORISED, "invalid_auth"), - (komfovent_api.KomfoventConnectionResult.INVALID_INPUT, "invalid_input"), - ], -) -async def test_flow_error_authenticating( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - error: komfovent_api.KomfoventConnectionResult, - expected_response: str, -) -> None: - """Test errors during flow authentication step are handled and dont affect final result.""" - result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - with patch( - "homeassistant.components.komfovent.config_flow.komfovent_api.get_credentials", - return_value=( - error, - None, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result2["type"] == FlowResultType.FORM - assert result2["errors"] == {"base": expected_response} - - final_result = await __test_normal_flow(hass, result2["flow_id"]) - assert final_result["type"] == FlowResultType.CREATE_ENTRY - assert final_result["title"] == "test-name" - assert final_result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("error", "expected_response"), - [ - (komfovent_api.KomfoventConnectionResult.NOT_FOUND, "cannot_connect"), - (komfovent_api.KomfoventConnectionResult.UNAUTHORISED, "invalid_auth"), - (komfovent_api.KomfoventConnectionResult.INVALID_INPUT, "invalid_input"), - ], -) -async def test_flow_error_device_info( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - error: komfovent_api.KomfoventConnectionResult, - expected_response: str, -) -> None: - """Test errors during flow device info download step are handled and dont affect final result.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - with patch( - "homeassistant.components.komfovent.config_flow.komfovent_api.get_credentials", - return_value=( - komfovent_api.KomfoventConnectionResult.SUCCESS, - komfovent_api.KomfoventCredentials("1.1.1.1", "user", "pass"), - ), - ), patch( - "homeassistant.components.komfovent.config_flow.komfovent_api.get_settings", - return_value=( - error, - None, - ), - ): - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - - assert result2["type"] == FlowResultType.FORM - assert result2["errors"] == {"base": expected_response} - - final_result = await __test_normal_flow(hass, result2["flow_id"]) - assert final_result["type"] == FlowResultType.CREATE_ENTRY - assert final_result["title"] == "test-name" - assert final_result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_device_already_exists( - hass: HomeAssistant, mock_setup_entry: AsyncMock -) -> None: - """Test device is not added when it already exists.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - unique_id="test-uid", - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - - final_result = await __test_normal_flow(hass, result["flow_id"]) - assert final_result["type"] == FlowResultType.ABORT - assert final_result["reason"] == "already_configured" - - -async def __test_normal_flow(hass: HomeAssistant, flow_id: str) -> FlowResult: - """Test flow completing as expected, no matter what happened before.""" - - with patch( - "homeassistant.components.komfovent.config_flow.komfovent_api.get_credentials", - return_value=( - komfovent_api.KomfoventConnectionResult.SUCCESS, - komfovent_api.KomfoventCredentials("1.1.1.1", "user", "pass"), - ), - ), patch( - "homeassistant.components.komfovent.config_flow.komfovent_api.get_settings", - return_value=( - komfovent_api.KomfoventConnectionResult.SUCCESS, - komfovent_api.KomfoventSettings("test-name", None, None, "test-uid"), 
- ), - ): - final_result = await hass.config_entries.flow.async_configure( - flow_id, - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - return final_result diff --git a/tests/components/kostal_plenticore/conftest.py b/tests/components/kostal_plenticore/conftest.py index 814a46f4a25..a83d9fd5e17 100644 --- a/tests/components/kostal_plenticore/conftest.py +++ b/tests/components/kostal_plenticore/conftest.py @@ -49,24 +49,20 @@ def mock_plenticore() -> Generator[Plenticore, None, None]: plenticore.client.get_version = AsyncMock() plenticore.client.get_version.return_value = VersionData( - { - "api_version": "0.2.0", - "hostname": "scb", - "name": "PUCK RESTful API", - "sw_version": "01.16.05025", - } + api_version="0.2.0", + hostname="scb", + name="PUCK RESTful API", + sw_version="01.16.05025", ) plenticore.client.get_me = AsyncMock() plenticore.client.get_me.return_value = MeData( - { - "locked": False, - "active": True, - "authenticated": True, - "permissions": [], - "anonymous": False, - "role": "USER", - } + locked=False, + active=True, + authenticated=True, + permissions=[], + anonymous=False, + role="USER", ) plenticore.client.get_process_data = AsyncMock() diff --git a/tests/components/kostal_plenticore/test_config_flow.py b/tests/components/kostal_plenticore/test_config_flow.py index 41facfe9c26..8bfe227bfdf 100644 --- a/tests/components/kostal_plenticore/test_config_flow.py +++ b/tests/components/kostal_plenticore/test_config_flow.py @@ -54,7 +54,19 @@ async def test_form_g1( # mock of the context manager instance mock_apiclient.login = AsyncMock() mock_apiclient.get_settings = AsyncMock( - return_value={"scb:network": [SettingsData({"id": "Hostname"})]} + return_value={ + "scb:network": [ + SettingsData( + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + id="Hostname", + type="string", + ), + ] + } ) mock_apiclient.get_setting_values = 
AsyncMock( # G1 model has the entry id "Hostname" @@ -108,7 +120,19 @@ async def test_form_g2( # mock of the context manager instance mock_apiclient.login = AsyncMock() mock_apiclient.get_settings = AsyncMock( - return_value={"scb:network": [SettingsData({"id": "Network:Hostname"})]} + return_value={ + "scb:network": [ + SettingsData( + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + id="Network:Hostname", + type="string", + ), + ] + } ) mock_apiclient.get_setting_values = AsyncMock( # G1 model has the entry id "Hostname" diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index d6a57648400..87c8c0e26a8 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -26,15 +26,13 @@ async def test_entry_diagnostics( mock_plenticore.client.get_settings.return_value = { "devices:local": [ SettingsData( - { - "id": "Battery:MinSoc", - "unit": "%", - "default": "None", - "min": 5, - "max": 100, - "type": "byte", - "access": "readwrite", - } + min="5", + max="100", + default=None, + access="readwrite", + unit="%", + id="Battery:MinSoc", + type="byte", ) ] } @@ -56,12 +54,12 @@ async def test_entry_diagnostics( "disabled_by": None, }, "client": { - "version": "Version(api_version=0.2.0, hostname=scb, name=PUCK RESTful API, sw_version=01.16.05025)", - "me": "Me(locked=False, active=True, authenticated=True, permissions=[], anonymous=False, role=USER)", + "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", + "me": "is_locked=False is_active=True is_authenticated=True permissions=[] is_anonymous=False role='USER'", "available_process_data": {"devices:local": ["HomeGrid_P", "HomePv_P"]}, "available_settings_data": { "devices:local": [ - "SettingsData(id=Battery:MinSoc, unit=%, default=None, min=5, max=100,type=byte, access=readwrite)" + "min='5' max='100' 
default=None access='readwrite' unit='%' id='Battery:MinSoc' type='byte'" ] }, }, diff --git a/tests/components/kostal_plenticore/test_helper.py b/tests/components/kostal_plenticore/test_helper.py index 61df222fd9e..93550405897 100644 --- a/tests/components/kostal_plenticore/test_helper.py +++ b/tests/components/kostal_plenticore/test_helper.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from pykoplenti import ApiClient, SettingsData +from pykoplenti import ApiClient, ExtendedApiClient, SettingsData import pytest from homeassistant.components.kostal_plenticore.const import DOMAIN @@ -17,10 +17,10 @@ from tests.common import MockConfigEntry def mock_apiclient() -> Generator[ApiClient, None, None]: """Return a mocked ApiClient class.""" with patch( - "homeassistant.components.kostal_plenticore.helper.ApiClient", + "homeassistant.components.kostal_plenticore.helper.ExtendedApiClient", autospec=True, ) as mock_api_class: - apiclient = MagicMock(spec=ApiClient) + apiclient = MagicMock(spec=ExtendedApiClient) apiclient.__aenter__.return_value = apiclient apiclient.__aexit__ = AsyncMock() mock_api_class.return_value = apiclient @@ -34,7 +34,19 @@ async def test_plenticore_async_setup_g1( ) -> None: """Tests the async_setup() method of the Plenticore class for G1 models.""" mock_apiclient.get_settings = AsyncMock( - return_value={"scb:network": [SettingsData({"id": "Hostname"})]} + return_value={ + "scb:network": [ + SettingsData( + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + id="Hostname", + type="string", + ) + ] + } ) mock_apiclient.get_setting_values = AsyncMock( # G1 model has the entry id "Hostname" @@ -74,7 +86,19 @@ async def test_plenticore_async_setup_g2( ) -> None: """Tests the async_setup() method of the Plenticore class for G2 models.""" mock_apiclient.get_settings = AsyncMock( - return_value={"scb:network": [SettingsData({"id": "Network:Hostname"})]} + 
return_value={ + "scb:network": [ + SettingsData( + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + id="Network:Hostname", + type="string", + ) + ] + } ) mock_apiclient.get_setting_values = AsyncMock( # G1 model has the entry id "Hostname" diff --git a/tests/components/kostal_plenticore/test_number.py b/tests/components/kostal_plenticore/test_number.py index dd5ba7127a8..fc7d9f213fe 100644 --- a/tests/components/kostal_plenticore/test_number.py +++ b/tests/components/kostal_plenticore/test_number.py @@ -23,9 +23,9 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture def mock_plenticore_client() -> Generator[ApiClient, None, None]: - """Return a patched ApiClient.""" + """Return a patched ExtendedApiClient.""" with patch( - "homeassistant.components.kostal_plenticore.helper.ApiClient", + "homeassistant.components.kostal_plenticore.helper.ExtendedApiClient", autospec=True, ) as plenticore_client_class: yield plenticore_client_class.return_value @@ -41,39 +41,33 @@ def mock_get_setting_values(mock_plenticore_client: ApiClient) -> list: mock_plenticore_client.get_settings.return_value = { "devices:local": [ SettingsData( - { - "default": None, - "min": 5, - "max": 100, - "access": "readwrite", - "unit": "%", - "type": "byte", - "id": "Battery:MinSoc", - } + min="5", + max="100", + default=None, + access="readwrite", + unit="%", + id="Battery:MinSoc", + type="byte", ), SettingsData( - { - "default": None, - "min": 50, - "max": 38000, - "access": "readwrite", - "unit": "W", - "type": "byte", - "id": "Battery:MinHomeComsumption", - } + min="50", + max="38000", + default=None, + access="readwrite", + unit="W", + id="Battery:MinHomeComsumption", + type="byte", ), ], "scb:network": [ SettingsData( - { - "min": "1", - "default": None, - "access": "readwrite", - "unit": None, - "id": "Hostname", - "type": "string", - "max": "63", - } + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + 
id="Hostname", + type="string", ) ], } @@ -129,15 +123,13 @@ async def test_setup_no_entries( mock_plenticore_client.get_settings.return_value = { "scb:network": [ SettingsData( - { - "min": "1", - "default": None, - "access": "readwrite", - "unit": None, - "id": "Hostname", - "type": "string", - "max": "63", - } + min="1", + max="63", + default=None, + access="readwrite", + unit=None, + id="Hostname", + type="string", ) ], } diff --git a/tests/components/kostal_plenticore/test_select.py b/tests/components/kostal_plenticore/test_select.py index 682e8f72ac8..9af2589af9b 100644 --- a/tests/components/kostal_plenticore/test_select.py +++ b/tests/components/kostal_plenticore/test_select.py @@ -18,8 +18,24 @@ async def test_select_battery_charging_usage_available( mock_plenticore.client.get_settings.return_value = { "devices:local": [ - SettingsData({"id": "Battery:SmartBatteryControl:Enable"}), - SettingsData({"id": "Battery:TimeControl:Enable"}), + SettingsData( + min=None, + max=None, + default=None, + access="readwrite", + unit=None, + id="Battery:SmartBatteryControl:Enable", + type="string", + ), + SettingsData( + min=None, + max=None, + default=None, + access="readwrite", + unit=None, + id="Battery:TimeControl:Enable", + type="string", + ), ] } diff --git a/tests/components/kraken/test_sensor.py b/tests/components/kraken/test_sensor.py index 5ef913ab74b..3ba351a4225 100644 --- a/tests/components/kraken/test_sensor.py +++ b/tests/components/kraken/test_sensor.py @@ -134,7 +134,9 @@ async def test_sensor( async def test_sensors_available_after_restart( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test that all sensors are added again after a restart.""" with patch( @@ -153,7 +155,6 @@ async def test_sensors_available_after_restart( ) entry.add_to_hass(hass) - device_registry = dr.async_get(hass) device_registry.async_get_or_create( 
config_entry_id=entry.entry_id, identifiers={(DOMAIN, "XBT_USD")}, diff --git a/tests/components/lametric/test_helpers.py b/tests/components/lametric/test_helpers.py index 9a03a4d52cf..a1b824086d2 100644 --- a/tests/components/lametric/test_helpers.py +++ b/tests/components/lametric/test_helpers.py @@ -12,12 +12,11 @@ from tests.common import MockConfigEntry async def test_get_coordinator_by_device_id( hass: HomeAssistant, + entity_registry: er.EntityRegistry, init_integration: MockConfigEntry, mock_lametric: MagicMock, ) -> None: """Test get LaMetric coordinator by device ID .""" - entity_registry = er.async_get(hass) - with pytest.raises(ValueError, match="Unknown LaMetric device ID: bla"): async_get_coordinator_by_device_id(hass, "bla") diff --git a/tests/components/lametric/test_services.py b/tests/components/lametric/test_services.py index 6a6ff4256a7..9a1258a82bb 100644 --- a/tests/components/lametric/test_services.py +++ b/tests/components/lametric/test_services.py @@ -34,10 +34,10 @@ pytestmark = pytest.mark.usefixtures("init_integration") async def test_service_chart( hass: HomeAssistant, + entity_registry: er.EntityRegistry, mock_lametric: MagicMock, ) -> None: """Test the LaMetric chart service.""" - entity_registry = er.async_get(hass) entry = entity_registry.async_get("button.frenck_s_lametric_next_app") assert entry @@ -121,10 +121,10 @@ async def test_service_chart( async def test_service_message( hass: HomeAssistant, + entity_registry: er.EntityRegistry, mock_lametric: MagicMock, ) -> None: """Test the LaMetric message service.""" - entity_registry = er.async_get(hass) entry = entity_registry.async_get("button.frenck_s_lametric_next_app") assert entry diff --git a/tests/components/landisgyr_heat_meter/test_init.py b/tests/components/landisgyr_heat_meter/test_init.py index 46fc07c5eb9..f8615aa77af 100644 --- a/tests/components/landisgyr_heat_meter/test_init.py +++ b/tests/components/landisgyr_heat_meter/test_init.py @@ -39,7 +39,9 @@ async def 
test_unload_entry(_, hass: HomeAssistant) -> None: @patch(API_HEAT_METER_SERVICE) -async def test_migrate_entry(_, hass: HomeAssistant) -> None: +async def test_migrate_entry( + _, hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test successful migration of entry data from version 1 to 2.""" mock_entry_data = { @@ -59,8 +61,7 @@ async def test_migrate_entry(_, hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) # Create entity entry to migrate to new unique ID - registry = er.async_get(hass) - registry.async_get_or_create( + entity_registry.async_get_or_create( SENSOR_DOMAIN, LANDISGYR_HEAT_METER_DOMAIN, "landisgyr_heat_meter_987654321_measuring_range_m3ph", @@ -74,5 +75,5 @@ async def test_migrate_entry(_, hass: HomeAssistant) -> None: # Check if entity unique id is migrated successfully assert mock_entry.version == 2 - entity = registry.async_get("sensor.heat_meter_measuring_range") + entity = entity_registry.async_get("sensor.heat_meter_measuring_range") assert entity.unique_id == "12345_measuring_range_m3ph" diff --git a/tests/components/lcn/test_binary_sensor.py b/tests/components/lcn/test_binary_sensor.py index 70df5af2305..c92a45d7cc9 100644 --- a/tests/components/lcn/test_binary_sensor.py +++ b/tests/components/lcn/test_binary_sensor.py @@ -37,9 +37,10 @@ async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: assert state -async def test_entity_attributes(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: """Test the attributes of an entity.""" - entity_registry = er.async_get(hass) entity_setpoint1 = entity_registry.async_get(BINARY_SENSOR_LOCKREGULATOR1) assert entity_setpoint1 diff --git a/tests/components/lcn/test_cover.py b/tests/components/lcn/test_cover.py index 74240c900be..4705591e1d3 100644 --- a/tests/components/lcn/test_cover.py +++ b/tests/components/lcn/test_cover.py 
@@ -38,9 +38,10 @@ async def test_setup_lcn_cover(hass: HomeAssistant, entry, lcn_connection) -> No assert state.state == STATE_OPEN -async def test_entity_attributes(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: """Test the attributes of an entity.""" - entity_registry = er.async_get(hass) entity_outputs = entity_registry.async_get(COVER_OUTPUTS) diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 47287fbd1d2..59cabb309b0 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -49,12 +49,11 @@ async def test_get_triggers_module_device( async def test_get_triggers_non_module_device( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry, lcn_connection ) -> None: """Test we get the expected triggers from a LCN non-module device.""" not_included_types = ("transmitter", "transponder", "fingerprint", "send_keys") - device_registry = dr.async_get(hass) host_device = device_registry.async_get_device( identifiers={(DOMAIN, entry.entry_id)} ) diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index a3b5b01ffbb..fb1d09d91d6 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -48,20 +48,23 @@ async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) assert not hass.data.get(DOMAIN) -async def test_async_setup_entry_update(hass: HomeAssistant, entry) -> None: +async def test_async_setup_entry_update( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + entry, +) -> None: """Test a successful setup entry if entry with same id already exists.""" # setup first entry entry.source = config_entries.SOURCE_IMPORT entry.add_to_hass(hass) # create 
dummy entity for LCN platform as an orphan - entity_registry = er.async_get(hass) dummy_entity = entity_registry.async_get_or_create( "switch", DOMAIN, "dummy", config_entry=entry ) # create dummy device for LCN platform as an orphan - device_registry = dr.async_get(hass) dummy_device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, entry.entry_id, 0, 7, False)}, diff --git a/tests/components/lcn/test_light.py b/tests/components/lcn/test_light.py index 73827ad38bb..7f23c1e6214 100644 --- a/tests/components/lcn/test_light.py +++ b/tests/components/lcn/test_light.py @@ -58,10 +58,10 @@ async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] -async def test_entity_attributes(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: """Test the attributes of an entity.""" - entity_registry = er.async_get(hass) - entity_output = entity_registry.async_get(LIGHT_OUTPUT1) assert entity_output diff --git a/tests/components/lcn/test_sensor.py b/tests/components/lcn/test_sensor.py index 116ab62854d..b46de397255 100644 --- a/tests/components/lcn/test_sensor.py +++ b/tests/components/lcn/test_sensor.py @@ -49,9 +49,10 @@ async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: assert state -async def test_entity_attributes(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: """Test the attributes of an entity.""" - entity_registry = er.async_get(hass) entity_var1 = entity_registry.async_get(SENSOR_VAR1) assert entity_var1 diff --git a/tests/components/lcn/test_switch.py b/tests/components/lcn/test_switch.py index 44a9e410fe3..a83d45c0889 100644 --- 
a/tests/components/lcn/test_switch.py +++ b/tests/components/lcn/test_switch.py @@ -39,9 +39,10 @@ async def test_setup_lcn_switch(hass: HomeAssistant, lcn_connection) -> None: assert state.state == STATE_OFF -async def test_entity_attributes(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_entity_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +) -> None: """Test the attributes of an entity.""" - entity_registry = er.async_get(hass) entity_output = entity_registry.async_get(SWITCH_OUTPUT1) diff --git a/tests/components/lidarr/test_init.py b/tests/components/lidarr/test_init.py index 5d6961e57c3..ce3a8536b2f 100644 --- a/tests/components/lidarr/test_init.py +++ b/tests/components/lidarr/test_init.py @@ -45,12 +45,14 @@ async def test_async_setup_entry_auth_failed( async def test_device_info( - hass: HomeAssistant, setup_integration: ComponentSetup, connection + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + setup_integration: ComponentSetup, + connection, ) -> None: """Test device info.""" await setup_integration() entry = hass.config_entries.async_entries(DOMAIN)[0] - device_registry = dr.async_get(hass) await hass.async_block_till_done() device = device_registry.async_get_device(identifiers={(DOMAIN, entry.entry_id)}) diff --git a/tests/components/lifx/test_binary_sensor.py b/tests/components/lifx/test_binary_sensor.py index d71a7eeaf0b..9fa065f3632 100644 --- a/tests/components/lifx/test_binary_sensor.py +++ b/tests/components/lifx/test_binary_sensor.py @@ -31,7 +31,9 @@ from . 
import ( from tests.common import MockConfigEntry, async_fire_time_changed -async def test_hev_cycle_state(hass: HomeAssistant) -> None: +async def test_hev_cycle_state( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test HEV cycle state binary sensor.""" config_entry = MockConfigEntry( domain=lifx.DOMAIN, @@ -48,7 +50,6 @@ async def test_hev_cycle_state(hass: HomeAssistant) -> None: await hass.async_block_till_done() entity_id = "binary_sensor.my_bulb_clean_cycle" - entity_registry = er.async_get(hass) state = hass.states.get(entity_id) assert state diff --git a/tests/components/lifx/test_button.py b/tests/components/lifx/test_button.py index d527229fe78..1fd4da4531e 100644 --- a/tests/components/lifx/test_button.py +++ b/tests/components/lifx/test_button.py @@ -31,7 +31,9 @@ def mock_lifx_coordinator_sleep(): yield -async def test_button_restart(hass: HomeAssistant) -> None: +async def test_button_restart( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test that a bulb can be restarted.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -50,7 +52,6 @@ async def test_button_restart(hass: HomeAssistant) -> None: unique_id = f"{SERIAL}_restart" entity_id = "button.my_bulb_restart" - entity_registry = er.async_get(hass) entity = entity_registry.async_get(entity_id) assert entity assert not entity.disabled @@ -63,7 +64,9 @@ async def test_button_restart(hass: HomeAssistant) -> None: bulb.set_reboot.assert_called_once() -async def test_button_identify(hass: HomeAssistant) -> None: +async def test_button_identify( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test that a bulb can be identified.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -82,7 +85,6 @@ async def test_button_identify(hass: HomeAssistant) -> None: unique_id = f"{SERIAL}_identify" entity_id = "button.my_bulb_identify" - entity_registry = er.async_get(hass) entity = entity_registry.async_get(entity_id) assert entity 
assert not entity.disabled diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 1b7da4f864a..70284106166 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -536,7 +536,11 @@ async def test_refuse_relays(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "cannot_connect"} -async def test_suggested_area(hass: HomeAssistant) -> None: +async def test_suggested_area( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Test suggested area is populated from lifx group label.""" class MockLifxCommandGetGroup: @@ -567,10 +571,8 @@ async def test_suggested_area(hass: HomeAssistant) -> None: await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() - entity_registry = er.async_get(hass) entity_id = "light.my_bulb" entity = entity_registry.async_get(entity_id) - device_registry = dr.async_get(hass) device = device_registry.async_get(entity.device_id) assert device.suggested_area == "My LIFX Group" diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 70a5a89a3ae..887e622b5cc 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -81,7 +81,11 @@ def patch_lifx_state_settle_delay(): yield -async def test_light_unique_id(hass: HomeAssistant) -> None: +async def test_light_unique_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Test a light unique id.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "1.2.3.4"}, unique_id=SERIAL @@ -95,17 +99,19 @@ async def test_light_unique_id(hass: HomeAssistant) -> None: await hass.async_block_till_done() entity_id = "light.my_bulb" - entity_registry = er.async_get(hass) assert entity_registry.async_get(entity_id).unique_id == SERIAL - device_registry 
= dr.async_get(hass) device = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, SERIAL)} ) assert device.identifiers == {(DOMAIN, SERIAL)} -async def test_light_unique_id_new_firmware(hass: HomeAssistant) -> None: +async def test_light_unique_id_new_firmware( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Test a light unique id with newer firmware.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "1.2.3.4"}, unique_id=SERIAL @@ -119,9 +125,7 @@ async def test_light_unique_id_new_firmware(hass: HomeAssistant) -> None: await hass.async_block_till_done() entity_id = "light.my_bulb" - entity_registry = er.async_get(hass) assert entity_registry.async_get(entity_id).unique_id == SERIAL - device_registry = dr.async_get(hass) device = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, ) @@ -1115,7 +1119,9 @@ async def test_white_bulb(hass: HomeAssistant) -> None: bulb.set_color.reset_mock() -async def test_config_zoned_light_strip_fails(hass: HomeAssistant) -> None: +async def test_config_zoned_light_strip_fails( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test we handle failure to update zones.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL @@ -1144,7 +1150,6 @@ async def test_config_zoned_light_strip_fails(hass: HomeAssistant) -> None: with _patch_discovery(device=light_strip), _patch_device(device=light_strip): await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) await hass.async_block_till_done() - entity_registry = er.async_get(hass) assert entity_registry.async_get(entity_id).unique_id == SERIAL assert hass.states.get(entity_id).state == STATE_OFF @@ -1153,7 +1158,9 @@ async def test_config_zoned_light_strip_fails(hass: HomeAssistant) -> None: assert hass.states.get(entity_id).state == 
STATE_UNAVAILABLE -async def test_legacy_zoned_light_strip(hass: HomeAssistant) -> None: +async def test_legacy_zoned_light_strip( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test we handle failure to update zones.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL @@ -1183,7 +1190,6 @@ async def test_legacy_zoned_light_strip(hass: HomeAssistant) -> None: with _patch_discovery(device=light_strip), _patch_device(device=light_strip): await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) await hass.async_block_till_done() - entity_registry = er.async_get(hass) assert entity_registry.async_get(entity_id).unique_id == SERIAL assert hass.states.get(entity_id).state == STATE_OFF # 1 to get the number of zones @@ -1197,7 +1203,9 @@ async def test_legacy_zoned_light_strip(hass: HomeAssistant) -> None: assert get_color_zones_mock.call_count == 5 -async def test_white_light_fails(hass: HomeAssistant) -> None: +async def test_white_light_fails( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test we handle failure to power on off.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL @@ -1211,7 +1219,6 @@ async def test_white_light_fails(hass: HomeAssistant) -> None: with _patch_discovery(device=bulb), _patch_device(device=bulb): await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) await hass.async_block_till_done() - entity_registry = er.async_get(hass) assert entity_registry.async_get(entity_id).unique_id == SERIAL assert hass.states.get(entity_id).state == STATE_OFF with pytest.raises(HomeAssistantError): diff --git a/tests/components/lifx/test_select.py b/tests/components/lifx/test_select.py index aa705418d55..529925be726 100644 --- a/tests/components/lifx/test_select.py +++ b/tests/components/lifx/test_select.py @@ -25,7 +25,9 @@ from . 
import ( from tests.common import MockConfigEntry, async_fire_time_changed -async def test_theme_select(hass: HomeAssistant) -> None: +async def test_theme_select( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test selecting a theme.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -46,7 +48,6 @@ async def test_theme_select(hass: HomeAssistant) -> None: entity_id = "select.my_bulb_theme" - entity_registry = er.async_get(hass) entity = entity_registry.async_get(entity_id) assert entity assert not entity.disabled @@ -62,7 +63,9 @@ async def test_theme_select(hass: HomeAssistant) -> None: bulb.set_extended_color_zones.reset_mock() -async def test_infrared_brightness(hass: HomeAssistant) -> None: +async def test_infrared_brightness( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test getting and setting infrared brightness.""" config_entry = MockConfigEntry( @@ -82,7 +85,6 @@ async def test_infrared_brightness(hass: HomeAssistant) -> None: unique_id = f"{SERIAL}_infrared_brightness" entity_id = "select.my_bulb_infrared_brightness" - entity_registry = er.async_get(hass) entity = entity_registry.async_get(entity_id) assert entity assert not entity.disabled diff --git a/tests/components/lifx/test_sensor.py b/tests/components/lifx/test_sensor.py index 5fe69c8dabc..e27bc0de3a8 100644 --- a/tests/components/lifx/test_sensor.py +++ b/tests/components/lifx/test_sensor.py @@ -31,7 +31,9 @@ from . 
import ( from tests.common import MockConfigEntry, async_fire_time_changed -async def test_rssi_sensor(hass: HomeAssistant) -> None: +async def test_rssi_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test LIFX RSSI sensor entity.""" config_entry = MockConfigEntry( @@ -49,7 +51,6 @@ async def test_rssi_sensor(hass: HomeAssistant) -> None: await hass.async_block_till_done() entity_id = "sensor.my_bulb_rssi" - entity_registry = er.async_get(hass) entry = entity_registry.entities.get(entity_id) assert entry @@ -82,7 +83,9 @@ async def test_rssi_sensor(hass: HomeAssistant) -> None: assert rssi.attributes["state_class"] == SensorStateClass.MEASUREMENT -async def test_rssi_sensor_old_firmware(hass: HomeAssistant) -> None: +async def test_rssi_sensor_old_firmware( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test LIFX RSSI sensor entity.""" config_entry = MockConfigEntry( @@ -100,7 +103,6 @@ async def test_rssi_sensor_old_firmware(hass: HomeAssistant) -> None: await hass.async_block_till_done() entity_id = "sensor.my_bulb_rssi" - entity_registry = er.async_get(hass) entry = entity_registry.entities.get(entity_id) assert entry diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 675057899b0..962c5500f06 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -1444,6 +1444,7 @@ async def test_light_service_call_color_conversion( platform.ENTITIES.append(platform.MockLight("Test_legacy", STATE_ON)) platform.ENTITIES.append(platform.MockLight("Test_rgbw", STATE_ON)) platform.ENTITIES.append(platform.MockLight("Test_rgbww", STATE_ON)) + platform.ENTITIES.append(platform.MockLight("Test_temperature", STATE_ON)) entity0 = platform.ENTITIES[0] entity0.supported_color_modes = {light.ColorMode.HS} @@ -1470,6 +1471,9 @@ async def test_light_service_call_color_conversion( entity6 = platform.ENTITIES[6] entity6.supported_color_modes = 
{light.ColorMode.RGBWW} + entity7 = platform.ENTITIES[7] + entity7.supported_color_modes = {light.ColorMode.COLOR_TEMP} + assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1498,6 +1502,9 @@ async def test_light_service_call_color_conversion( state = hass.states.get(entity6.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] + state = hass.states.get(entity7.entity_id) + assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] + await hass.services.async_call( "light", "turn_on", @@ -1510,6 +1517,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 100), @@ -1530,6 +1538,8 @@ async def test_light_service_call_color_conversion( assert data == {"brightness": 255, "rgbw_color": (0, 0, 255, 0)} _, data = entity6.last_call("turn_on") assert data == {"brightness": 255, "rgbww_color": (0, 0, 255, 0, 0)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 255, "color_temp_kelvin": 1739, "color_temp": 575} await hass.services.async_call( "light", @@ -1543,6 +1553,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 100, "hs_color": (240, 0), @@ -1564,6 +1575,8 @@ async def test_light_service_call_color_conversion( _, data = entity6.last_call("turn_on") # The midpoint of the the white channels is warm, compensated by adding green + blue assert data == {"brightness": 255, "rgbww_color": (0, 76, 141, 255, 255)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 255, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( "light", @@ -1577,6 +1590,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + 
entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (128, 0, 0), @@ -1597,6 +1611,8 @@ async def test_light_service_call_color_conversion( assert data == {"brightness": 128, "rgbw_color": (128, 0, 0, 0)} _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 0, 0)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 6279, "color_temp": 159} await hass.services.async_call( "light", @@ -1610,6 +1626,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "rgb_color": (255, 255, 255), @@ -1631,6 +1648,8 @@ async def test_light_service_call_color_conversion( _, data = entity6.last_call("turn_on") # The midpoint the the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( "light", @@ -1644,6 +1663,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.1, 0.8), @@ -1664,6 +1684,8 @@ async def test_light_service_call_color_conversion( assert data == {"brightness": 128, "rgbw_color": (0, 255, 22, 0)} _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (0, 255, 22, 0, 0)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 8645, "color_temp": 115} await hass.services.async_call( "light", @@ -1677,6 +1699,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "xy_color": (0.323, 0.329), @@ -1698,6 +1721,8 @@ async def 
test_light_service_call_color_conversion( _, data = entity6.last_call("turn_on") # The midpoint the the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 75, 140, 255, 255)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( "light", @@ -1711,6 +1736,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (128, 0, 0, 64), @@ -1732,6 +1758,8 @@ async def test_light_service_call_color_conversion( _, data = entity6.last_call("turn_on") # The midpoint the the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (128, 0, 30, 117, 117)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 3011, "color_temp": 332} await hass.services.async_call( "light", @@ -1745,6 +1773,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "rgbw_color": (255, 255, 255, 255), @@ -1766,6 +1795,8 @@ async def test_light_service_call_color_conversion( _, data = entity6.last_call("turn_on") # The midpoint the the white channels is warm, compensated by adding green + blue assert data == {"brightness": 128, "rgbww_color": (0, 76, 141, 255, 255)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 5962, "color_temp": 167} await hass.services.async_call( "light", @@ -1779,6 +1810,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (128, 0, 0, 64, 32), @@ -1799,6 +1831,8 @@ async def test_light_service_call_color_conversion( assert data == 
{"brightness": 128, "rgbw_color": (128, 9, 0, 33)} _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (128, 0, 0, 64, 32)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 3845, "color_temp": 260} await hass.services.async_call( "light", @@ -1812,6 +1846,7 @@ async def test_light_service_call_color_conversion( entity4.entity_id, entity5.entity_id, entity6.entity_id, + entity7.entity_id, ], "brightness_pct": 50, "rgbww_color": (255, 255, 255, 255, 255), @@ -1833,6 +1868,8 @@ async def test_light_service_call_color_conversion( assert data == {"brightness": 128, "rgbw_color": (96, 44, 0, 255)} _, data = entity6.last_call("turn_on") assert data == {"brightness": 128, "rgbww_color": (255, 255, 255, 255, 255)} + _, data = entity7.last_call("turn_on") + assert data == {"brightness": 128, "color_temp_kelvin": 3451, "color_temp": 289} async def test_light_service_call_color_conversion_named_tuple( diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index 816bde430e7..65b83aa0269 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -2,35 +2,24 @@ import pytest from homeassistant.components import light -from homeassistant.components.light.reproduce_state import DEPRECATION_WARNING from homeassistant.core import HomeAssistant, State from homeassistant.helpers.state import async_reproduce_state from tests.common import async_mock_service VALID_BRIGHTNESS = {"brightness": 180} -VALID_FLASH = {"flash": "short"} VALID_EFFECT = {"effect": "random"} -VALID_TRANSITION = {"transition": 15} -VALID_COLOR_NAME = {"color_name": "red"} VALID_COLOR_TEMP = {"color_temp": 240} VALID_HS_COLOR = {"hs_color": (345, 75)} -VALID_KELVIN = {"kelvin": 4000} -VALID_PROFILE = {"profile": "relax"} VALID_RGB_COLOR = {"rgb_color": (255, 63, 111)} VALID_RGBW_COLOR = {"rgbw_color": (255, 63, 111, 10)} 
VALID_RGBWW_COLOR = {"rgbww_color": (255, 63, 111, 10, 20)} VALID_XY_COLOR = {"xy_color": (0.59, 0.274)} NONE_BRIGHTNESS = {"brightness": None} -NONE_FLASH = {"flash": None} NONE_EFFECT = {"effect": None} -NONE_TRANSITION = {"transition": None} -NONE_COLOR_NAME = {"color_name": None} NONE_COLOR_TEMP = {"color_temp": None} NONE_HS_COLOR = {"hs_color": None} -NONE_KELVIN = {"kelvin": None} -NONE_PROFILE = {"profile": None} NONE_RGB_COLOR = {"rgb_color": None} NONE_RGBW_COLOR = {"rgbw_color": None} NONE_RGBWW_COLOR = {"rgbww_color": None} @@ -43,14 +32,9 @@ async def test_reproducing_states( """Test reproducing Light states.""" hass.states.async_set("light.entity_off", "off", {}) hass.states.async_set("light.entity_bright", "on", VALID_BRIGHTNESS) - hass.states.async_set("light.entity_flash", "on", VALID_FLASH) hass.states.async_set("light.entity_effect", "on", VALID_EFFECT) - hass.states.async_set("light.entity_trans", "on", VALID_TRANSITION) - hass.states.async_set("light.entity_name", "on", VALID_COLOR_NAME) hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP) hass.states.async_set("light.entity_hs", "on", VALID_HS_COLOR) - hass.states.async_set("light.entity_kelvin", "on", VALID_KELVIN) - hass.states.async_set("light.entity_profile", "on", VALID_PROFILE) hass.states.async_set("light.entity_rgb", "on", VALID_RGB_COLOR) hass.states.async_set("light.entity_xy", "on", VALID_XY_COLOR) @@ -63,14 +47,9 @@ async def test_reproducing_states( [ State("light.entity_off", "off"), State("light.entity_bright", "on", VALID_BRIGHTNESS), - State("light.entity_flash", "on", VALID_FLASH), State("light.entity_effect", "on", VALID_EFFECT), - State("light.entity_trans", "on", VALID_TRANSITION), - State("light.entity_name", "on", VALID_COLOR_NAME), State("light.entity_temp", "on", VALID_COLOR_TEMP), State("light.entity_hs", "on", VALID_HS_COLOR), - State("light.entity_kelvin", "on", VALID_KELVIN), - State("light.entity_profile", "on", VALID_PROFILE), 
State("light.entity_rgb", "on", VALID_RGB_COLOR), State("light.entity_xy", "on", VALID_XY_COLOR), ], @@ -92,20 +71,15 @@ async def test_reproducing_states( [ State("light.entity_xy", "off"), State("light.entity_off", "on", VALID_BRIGHTNESS), - State("light.entity_bright", "on", VALID_FLASH), - State("light.entity_flash", "on", VALID_EFFECT), - State("light.entity_effect", "on", VALID_TRANSITION), - State("light.entity_trans", "on", VALID_COLOR_NAME), - State("light.entity_name", "on", VALID_COLOR_TEMP), + State("light.entity_bright", "on", VALID_EFFECT), + State("light.entity_effect", "on", VALID_COLOR_TEMP), State("light.entity_temp", "on", VALID_HS_COLOR), - State("light.entity_hs", "on", VALID_KELVIN), - State("light.entity_kelvin", "on", VALID_PROFILE), - State("light.entity_profile", "on", VALID_RGB_COLOR), + State("light.entity_hs", "on", VALID_RGB_COLOR), State("light.entity_rgb", "on", VALID_XY_COLOR), ], ) - assert len(turn_on_calls) == 11 + assert len(turn_on_calls) == 6 expected_calls = [] @@ -113,42 +87,22 @@ async def test_reproducing_states( expected_off["entity_id"] = "light.entity_off" expected_calls.append(expected_off) - expected_bright = dict(VALID_FLASH) + expected_bright = dict(VALID_EFFECT) expected_bright["entity_id"] = "light.entity_bright" expected_calls.append(expected_bright) - expected_flash = dict(VALID_EFFECT) - expected_flash["entity_id"] = "light.entity_flash" - expected_calls.append(expected_flash) - - expected_effect = dict(VALID_TRANSITION) + expected_effect = dict(VALID_COLOR_TEMP) expected_effect["entity_id"] = "light.entity_effect" expected_calls.append(expected_effect) - expected_trans = dict(VALID_COLOR_NAME) - expected_trans["entity_id"] = "light.entity_trans" - expected_calls.append(expected_trans) - - expected_name = dict(VALID_COLOR_TEMP) - expected_name["entity_id"] = "light.entity_name" - expected_calls.append(expected_name) - expected_temp = dict(VALID_HS_COLOR) expected_temp["entity_id"] = "light.entity_temp" 
expected_calls.append(expected_temp) - expected_hs = dict(VALID_KELVIN) + expected_hs = dict(VALID_RGB_COLOR) expected_hs["entity_id"] = "light.entity_hs" expected_calls.append(expected_hs) - expected_kelvin = dict(VALID_PROFILE) - expected_kelvin["entity_id"] = "light.entity_kelvin" - expected_calls.append(expected_kelvin) - - expected_profile = dict(VALID_RGB_COLOR) - expected_profile["entity_id"] = "light.entity_profile" - expected_calls.append(expected_profile) - expected_rgb = dict(VALID_XY_COLOR) expected_rgb["entity_id"] = "light.entity_rgb" expected_calls.append(expected_rgb) @@ -191,10 +145,8 @@ async def test_filter_color_modes( """Test filtering of parameters according to color mode.""" hass.states.async_set("light.entity", "off", {}) all_colors = { - **VALID_COLOR_NAME, **VALID_COLOR_TEMP, **VALID_HS_COLOR, - **VALID_KELVIN, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, **VALID_RGBWW_COLOR, @@ -240,31 +192,13 @@ async def test_filter_color_modes( assert len(turn_on_calls) == 1 -async def test_deprecation_warning( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test deprecation warning.""" - hass.states.async_set("light.entity_off", "off", {}) - turn_on_calls = async_mock_service(hass, "light", "turn_on") - await async_reproduce_state( - hass, [State("light.entity_off", "on", {"brightness_pct": 80})] - ) - assert len(turn_on_calls) == 1 - assert DEPRECATION_WARNING % ["brightness_pct"] in caplog.text - - @pytest.mark.parametrize( "saved_state", ( NONE_BRIGHTNESS, - NONE_FLASH, NONE_EFFECT, - NONE_TRANSITION, - NONE_COLOR_NAME, NONE_COLOR_TEMP, NONE_HS_COLOR, - NONE_KELVIN, - NONE_PROFILE, NONE_RGB_COLOR, NONE_RGBW_COLOR, NONE_RGBWW_COLOR, diff --git a/tests/components/linear_garage_door/__init__.py b/tests/components/linear_garage_door/__init__.py new file mode 100644 index 00000000000..e5abc6c943c --- /dev/null +++ b/tests/components/linear_garage_door/__init__.py @@ -0,0 +1 @@ +"""Tests for the Linear Garage Door integration.""" diff 
--git a/tests/components/linear_garage_door/test_config_flow.py b/tests/components/linear_garage_door/test_config_flow.py new file mode 100644 index 00000000000..64664745c54 --- /dev/null +++ b/tests/components/linear_garage_door/test_config_flow.py @@ -0,0 +1,163 @@ +"""Test the Linear Garage Door config flow.""" + +from unittest.mock import patch + +from linear_garage_door.errors import InvalidLoginError + +from homeassistant import config_entries +from homeassistant.components.linear_garage_door.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .util import async_init_integration + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + + with patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.get_sites", + return_value=[{"id": "test-site-id", "name": "test-site-name"}], + ), patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.close", + return_value=None, + ), patch( + "uuid.uuid4", + return_value="test-uuid", + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "email": "test-email", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.linear_garage_door.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], {"site": "test-site-id"} + ) + await hass.async_block_till_done() + + assert result3["type"] == FlowResultType.CREATE_ENTRY + assert result3["title"] == "test-site-name" + assert result3["data"] == { + "email": 
"test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_reauth(hass: HomeAssistant) -> None: + """Test reauthentication.""" + + entry = await async_init_integration(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + "title_placeholders": {"name": entry.title}, + "unique_id": entry.unique_id, + }, + data=entry.data, + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.get_sites", + return_value=[{"id": "test-site-id", "name": "test-site-name"}], + ), patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.close", + return_value=None, + ), patch( + "uuid.uuid4", + return_value="test-uuid", + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "email": "new-email", + "password": "new-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + + entries = hass.config_entries.async_entries() + assert len(entries) == 1 + assert entries[0].data == { + "email": "new-email", + "password": "new-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + } + + +async def test_form_invalid_login(hass: HomeAssistant) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.login", + side_effect=InvalidLoginError, + ), patch( + 
"homeassistant.components.linear_garage_door.config_flow.Linear.close", + return_value=None, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "email": "test-email", + "password": "test-password", + }, + ) + + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + + +async def test_form_exception(hass: HomeAssistant) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + ) + + with patch( + "homeassistant.components.linear_garage_door.config_flow.Linear.login", + side_effect=Exception, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "email": "test-email", + "password": "test-password", + }, + ) + + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} diff --git a/tests/components/linear_garage_door/test_coordinator.py b/tests/components/linear_garage_door/test_coordinator.py new file mode 100644 index 00000000000..fc3087db354 --- /dev/null +++ b/tests/components/linear_garage_door/test_coordinator.py @@ -0,0 +1,99 @@ +"""Test data update coordinator for Linear Garage Door.""" + +from unittest.mock import patch + +from linear_garage_door.errors import InvalidLoginError, ResponseError + +from homeassistant.components.linear_garage_door.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_invalid_password( + hass: HomeAssistant, +) -> None: + """Test invalid password.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "email": "test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + }, + ) + config_entry.add_to_hass(hass) + + with patch( + 
"homeassistant.components.linear_garage_door.coordinator.Linear.login", + side_effect=InvalidLoginError( + "Login provided is invalid, please check the email and password" + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries + assert len(entries) == 1 + assert entries[0].state == ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert flows + assert len(flows) == 1 + assert flows[0]["context"]["source"] == "reauth" + + +async def test_response_error(hass: HomeAssistant) -> None: + """Test response error.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "email": "test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + }, + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.login", + side_effect=ResponseError, + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries + assert len(entries) == 1 + assert entries[0].state == ConfigEntryState.SETUP_RETRY + + +async def test_invalid_login( + hass: HomeAssistant, +) -> None: + """Test invalid login.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "email": "test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + }, + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.login", + side_effect=InvalidLoginError("Some other error"), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries + assert len(entries) == 1 + assert 
entries[0].state == ConfigEntryState.SETUP_RETRY diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py new file mode 100644 index 00000000000..428411d39e0 --- /dev/null +++ b/tests/components/linear_garage_door/test_cover.py @@ -0,0 +1,187 @@ +"""Test Linear Garage Door cover.""" + +from datetime import timedelta +from unittest.mock import patch + +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.components.linear_garage_door.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .util import async_init_integration + +from tests.common import async_fire_time_changed + + +async def test_data(hass: HomeAssistant) -> None: + """Test that data gets parsed and returned appropriately.""" + + await async_init_integration(hass) + + assert hass.data[DOMAIN] + entries = hass.config_entries.async_entries(DOMAIN) + assert entries + assert len(entries) == 1 + assert entries[0].state == ConfigEntryState.LOADED + assert hass.states.get("cover.test_garage_1").state == STATE_OPEN + assert hass.states.get("cover.test_garage_2").state == STATE_CLOSED + assert hass.states.get("cover.test_garage_3").state == STATE_OPENING + assert hass.states.get("cover.test_garage_4").state == STATE_CLOSING + + +async def test_open_cover(hass: HomeAssistant) -> None: + """Test that opening the cover works as intended.""" + + await async_init_integration(hass) + + with patch( + "homeassistant.components.linear_garage_door.cover.Linear.operate_device" + ) as operate_device: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert 
operate_device.call_count == 0 + + with patch( + "homeassistant.components.linear_garage_door.cover.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + return_value=None, + ) as operate_device, patch( + "homeassistant.components.linear_garage_door.cover.Linear.close", + return_value=True, + ): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert operate_device.call_count == 1 + with patch( + "homeassistant.components.linear_garage_door.cover.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.get_devices", + return_value=[ + {"id": "test1", "name": "Test Garage 1", "subdevices": ["GDO", "Light"]}, + {"id": "test2", "name": "Test Garage 2", "subdevices": ["GDO", "Light"]}, + ], + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + side_effect=lambda id: { + "test1": { + "GDO": {"Open_B": "true", "Open_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + "test2": { + "GDO": {"Open_B": "false", "Opening_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test3": { + "GDO": {"Open_B": "false", "Opening_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test4": { + "GDO": {"Open_B": "true", "Opening_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + }[id], + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.close", + return_value=True, + ): + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=60)) + await hass.async_block_till_done() + + assert hass.states.get("cover.test_garage_2").state == STATE_OPENING + + +async def test_close_cover(hass: HomeAssistant) -> None: + """Test that closing the cover works as intended.""" + + await async_init_integration(hass) + + with patch( + 
"homeassistant.components.linear_garage_door.cover.Linear.operate_device" + ) as operate_device: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert operate_device.call_count == 0 + + with patch( + "homeassistant.components.linear_garage_door.cover.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + return_value=None, + ) as operate_device, patch( + "homeassistant.components.linear_garage_door.cover.Linear.close", + return_value=True, + ): + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert operate_device.call_count == 1 + with patch( + "homeassistant.components.linear_garage_door.cover.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.get_devices", + return_value=[ + {"id": "test1", "name": "Test Garage 1", "subdevices": ["GDO", "Light"]}, + {"id": "test2", "name": "Test Garage 2", "subdevices": ["GDO", "Light"]}, + ], + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + side_effect=lambda id: { + "test1": { + "GDO": {"Open_B": "true", "Opening_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + "test2": { + "GDO": {"Open_B": "false", "Open_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test3": { + "GDO": {"Open_B": "false", "Opening_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test4": { + "GDO": {"Open_B": "true", "Opening_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + }[id], + ), patch( + "homeassistant.components.linear_garage_door.cover.Linear.close", + return_value=True, + ): + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=60)) + await hass.async_block_till_done() + + assert hass.states.get("cover.test_garage_1").state == 
STATE_CLOSING diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py new file mode 100644 index 00000000000..0650196d619 --- /dev/null +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -0,0 +1,53 @@ +"""Test diagnostics of Linear Garage Door.""" + +from homeassistant.core import HomeAssistant + +from .util import async_init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> None: + """Test config entry diagnostics.""" + entry = await async_init_integration(hass) + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result["entry"]["data"] == { + "email": "**REDACTED**", + "password": "**REDACTED**", + "site_id": "test-site-id", + "device_id": "test-uuid", + } + assert result["coordinator_data"] == { + "test1": { + "name": "Test Garage 1", + "subdevices": { + "GDO": {"Open_B": "true", "Open_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + }, + "test2": { + "name": "Test Garage 2", + "subdevices": { + "GDO": {"Open_B": "false", "Open_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + }, + "test3": { + "name": "Test Garage 3", + "subdevices": { + "GDO": {"Open_B": "false", "Opening_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + }, + "test4": { + "name": "Test Garage 4", + "subdevices": { + "GDO": {"Open_B": "true", "Opening_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + }, + } diff --git a/tests/components/linear_garage_door/test_init.py b/tests/components/linear_garage_door/test_init.py new file mode 100644 index 00000000000..e8d76770050 --- /dev/null +++ b/tests/components/linear_garage_door/test_init.py @@ -0,0 +1,59 @@ +"""Test Linear Garage Door init.""" + +from unittest.mock import patch + +from 
homeassistant.components.linear_garage_door.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant) -> None: + """Test the unload entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "email": "test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + }, + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", + return_value=[ + {"id": "test", "name": "Test Garage", "subdevices": ["GDO", "Light"]} + ], + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", + return_value={ + "GDO": {"Open_B": "true", "Open_P": "100"}, + "Light": {"On_B": "true", "On_P": "10"}, + }, + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.close", + return_value=True, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.data[DOMAIN] + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries + assert len(entries) == 1 + assert entries[0].state == ConfigEntryState.LOADED + + with patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.close", + return_value=True, + ): + await hass.config_entries.async_unload(entries[0].entry_id) + await hass.async_block_till_done() + assert entries[0].state == ConfigEntryState.NOT_LOADED diff --git a/tests/components/linear_garage_door/util.py b/tests/components/linear_garage_door/util.py new file mode 100644 index 00000000000..d8348b9bb64 --- /dev/null +++ b/tests/components/linear_garage_door/util.py @@ -0,0 +1,62 @@ +"""Utilities for Linear Garage Door testing.""" + +from 
unittest.mock import patch + +from homeassistant.components.linear_garage_door.const import DOMAIN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: + """Initialize mock integration.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + "email": "test-email", + "password": "test-password", + "site_id": "test-site-id", + "device_id": "test-uuid", + }, + ) + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.login", + return_value=True, + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", + return_value=[ + {"id": "test1", "name": "Test Garage 1", "subdevices": ["GDO", "Light"]}, + {"id": "test2", "name": "Test Garage 2", "subdevices": ["GDO", "Light"]}, + {"id": "test3", "name": "Test Garage 3", "subdevices": ["GDO", "Light"]}, + {"id": "test4", "name": "Test Garage 4", "subdevices": ["GDO", "Light"]}, + ], + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", + side_effect=lambda id: { + "test1": { + "GDO": {"Open_B": "true", "Open_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + "test2": { + "GDO": {"Open_B": "false", "Open_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test3": { + "GDO": {"Open_B": "false", "Opening_P": "0"}, + "Light": {"On_B": "false", "On_P": "0"}, + }, + "test4": { + "GDO": {"Open_B": "true", "Opening_P": "100"}, + "Light": {"On_B": "true", "On_P": "100"}, + }, + }[id], + ), patch( + "homeassistant.components.linear_garage_door.coordinator.Linear.close", + return_value=True, + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/litejet/test_scene.py b/tests/components/litejet/test_scene.py index d1316d81bbe..76c1556f66d 100644 --- 
a/tests/components/litejet/test_scene.py +++ b/tests/components/litejet/test_scene.py @@ -17,16 +17,16 @@ ENTITY_OTHER_SCENE = "scene.litejet_mock_scene_2" ENTITY_OTHER_SCENE_NUMBER = 2 -async def test_disabled_by_default(hass: HomeAssistant, mock_litejet) -> None: +async def test_disabled_by_default( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_litejet +) -> None: """Test the scene is disabled by default.""" await async_init_integration(hass) - registry = er.async_get(hass) - state = hass.states.get(ENTITY_SCENE) assert state is None - entry = registry.async_get(ENTITY_SCENE) + entry = entity_registry.async_get(ENTITY_SCENE) assert entry assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION diff --git a/tests/components/litterrobot/test_button.py b/tests/components/litterrobot/test_button.py index a17c0439824..9a4145dd224 100644 --- a/tests/components/litterrobot/test_button.py +++ b/tests/components/litterrobot/test_button.py @@ -13,10 +13,11 @@ from .conftest import setup_integration BUTTON_ENTITY = "button.test_reset_waste_drawer" -async def test_button(hass: HomeAssistant, mock_account: MagicMock) -> None: +async def test_button( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_account: MagicMock +) -> None: """Test the creation and values of the Litter-Robot button.""" await setup_integration(hass, mock_account, BUTTON_DOMAIN) - entity_registry = er.async_get(hass) state = hass.states.get(BUTTON_ENTITY) assert state diff --git a/tests/components/litterrobot/test_init.py b/tests/components/litterrobot/test_init.py index 170d6313029..25c47ee4945 100644 --- a/tests/components/litterrobot/test_init.py +++ b/tests/components/litterrobot/test_init.py @@ -14,7 +14,6 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from 
homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from .common import CONFIG, VACUUM_ENTITY_ID, remove_device @@ -73,17 +72,19 @@ async def test_entry_not_setup( async def test_device_remove_devices( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mock_account: MagicMock + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_account: MagicMock, ) -> None: """Test we can only remove a device that no longer exists.""" assert await async_setup_component(hass, "config", {}) config_entry = await setup_integration(hass, mock_account, VACUUM_DOMAIN) - registry: EntityRegistry = er.async_get(hass) - entity = registry.entities[VACUUM_ENTITY_ID] + entity = entity_registry.entities[VACUUM_ENTITY_ID] assert entity.unique_id == "LR3C012345-litter_box" - device_registry = dr.async_get(hass) device_entry = device_registry.async_get(entity.device_id) assert ( await remove_device( diff --git a/tests/components/litterrobot/test_vacuum.py b/tests/components/litterrobot/test_vacuum.py index 3aee7b5075f..fe77119ca5e 100644 --- a/tests/components/litterrobot/test_vacuum.py +++ b/tests/components/litterrobot/test_vacuum.py @@ -32,21 +32,22 @@ COMPONENT_SERVICE_DOMAIN = { } -async def test_vacuum(hass: HomeAssistant, mock_account: MagicMock) -> None: +async def test_vacuum( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_account: MagicMock +) -> None: """Tests the vacuum entity was set up.""" - ent_reg = er.async_get(hass) - ent_reg.async_get_or_create( + entity_registry.async_get_or_create( PLATFORM_DOMAIN, DOMAIN, VACUUM_UNIQUE_ID, suggested_object_id=VACUUM_ENTITY_ID.replace(PLATFORM_DOMAIN, ""), ) - ent_reg_entry = ent_reg.async_get(VACUUM_ENTITY_ID) + ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) assert ent_reg_entry.unique_id == VACUUM_UNIQUE_ID await setup_integration(hass, mock_account, 
PLATFORM_DOMAIN) - assert len(ent_reg.entities) == 1 + assert len(entity_registry.entities) == 1 assert hass.services.has_service(DOMAIN, SERVICE_SET_SLEEP_MODE) vacuum = hass.states.get(VACUUM_ENTITY_ID) @@ -54,7 +55,7 @@ async def test_vacuum(hass: HomeAssistant, mock_account: MagicMock) -> None: assert vacuum.state == STATE_DOCKED assert vacuum.attributes["is_sleeping"] is False - ent_reg_entry = ent_reg.async_get(VACUUM_ENTITY_ID) + ent_reg_entry = entity_registry.async_get(VACUUM_ENTITY_ID) assert ent_reg_entry.unique_id == VACUUM_UNIQUE_ID @@ -70,15 +71,16 @@ async def test_vacuum_status_when_sleeping( async def test_no_robots( - hass: HomeAssistant, mock_account_with_no_robots: MagicMock + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_account_with_no_robots: MagicMock, ) -> None: """Tests the vacuum entity was set up.""" entry = await setup_integration(hass, mock_account_with_no_robots, PLATFORM_DOMAIN) assert not hass.services.has_service(DOMAIN, SERVICE_SET_SLEEP_MODE) - ent_reg = er.async_get(hass) - assert len(ent_reg.entities) == 0 + assert len(entity_registry.entities) == 0 assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/local_todo/test_todo.py b/tests/components/local_todo/test_todo.py index 39e9264d45a..67d0703ca7c 100644 --- a/tests/components/local_todo/test_todo.py +++ b/tests/components/local_todo/test_todo.py @@ -2,6 +2,7 @@ from collections.abc import Awaitable, Callable import textwrap +from typing import Any import pytest @@ -13,39 +14,22 @@ from .conftest import TEST_ENTITY from tests.typing import WebSocketGenerator -@pytest.fixture -def ws_req_id() -> Callable[[], int]: - """Fixture for incremental websocket requests.""" - - id = 0 - - def next() -> int: - nonlocal id - id += 1 - return id - - return next - - @pytest.fixture async def ws_get_items( - hass_ws_client: WebSocketGenerator, ws_req_id: Callable[[], int] + hass_ws_client: 
WebSocketGenerator, ) -> Callable[[], Awaitable[dict[str, str]]]: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: # Fetch items using To-do platform client = await hass_ws_client() - id = ws_req_id() - await client.send_json( + await client.send_json_auto_id( { - "id": id, "type": "todo/item/list", "entity_id": TEST_ENTITY, } ) resp = await client.receive_json() - assert resp.get("id") == id assert resp.get("success") return resp.get("result", {}).get("items", []) @@ -55,35 +39,51 @@ async def ws_get_items( @pytest.fixture async def ws_move_item( hass_ws_client: WebSocketGenerator, - ws_req_id: Callable[[], int], ) -> Callable[[str, str | None], Awaitable[None]]: """Fixture to move an item in the todo list.""" async def move(uid: str, previous_uid: str | None) -> None: # Fetch items using To-do platform client = await hass_ws_client() - id = ws_req_id() data = { - "id": id, "type": "todo/item/move", "entity_id": TEST_ENTITY, "uid": uid, } if previous_uid is not None: data["previous_uid"] = previous_uid - await client.send_json(data) + await client.send_json_auto_id(data) resp = await client.receive_json() - assert resp.get("id") == id assert resp.get("success") return move +@pytest.fixture(autouse=True) +def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the tests that keesp UTC-6 all year round.""" + hass.config.set_time_zone("America/Regina") + + +@pytest.mark.parametrize( + ("item_data", "expected_item_data"), + [ + ({}, {}), + ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), + ( + {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {"due": "2023-11-17T05:30:00-06:00"}, + ), + ({"description": "Additional detail"}, {"description": "Additional detail"}), + ], +) async def test_add_item( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup_integration: None, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + item_data: dict[str, Any], + expected_item_data: dict[str, Any], ) -> 
None: """Test adding a todo item.""" @@ -94,7 +94,7 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, "add_item", - {"item": "replace batteries"}, + {"item": "replace batteries", **item_data}, target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -103,6 +103,8 @@ async def test_add_item( assert len(items) == 1 assert items[0]["summary"] == "replace batteries" assert items[0]["status"] == "needs_action" + for k, v in expected_item_data.items(): + assert items[0][k] == v assert "uid" in items[0] state = hass.states.get(TEST_ENTITY) @@ -110,16 +112,30 @@ async def test_add_item( assert state.state == "1" +@pytest.mark.parametrize( + ("item_data", "expected_item_data"), + [ + ({}, {}), + ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), + ( + {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {"due": "2023-11-17T05:30:00-06:00"}, + ), + ({"description": "Additional detail"}, {"description": "Additional detail"}), + ], +) async def test_remove_item( hass: HomeAssistant, setup_integration: None, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + item_data: dict[str, Any], + expected_item_data: dict[str, Any], ) -> None: """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, "add_item", - {"item": "replace batteries"}, + {"item": "replace batteries", **item_data}, target={"entity_id": TEST_ENTITY}, blocking=True, ) @@ -128,6 +144,8 @@ async def test_remove_item( assert len(items) == 1 assert items[0]["summary"] == "replace batteries" assert items[0]["status"] == "needs_action" + for k, v in expected_item_data.items(): + assert items[0][k] == v assert "uid" in items[0] state = hass.states.get(TEST_ENTITY) @@ -189,10 +207,30 @@ async def test_bulk_remove( assert state.state == "0" +@pytest.mark.parametrize( + ("item_data", "expected_item_data", "expected_state"), + [ + ({"status": "completed"}, {"status": "completed"}, "0"), + ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}, "1"), + ( + {"due_datetime": 
"2023-11-17T11:30:00+00:00"}, + {"due": "2023-11-17T05:30:00-06:00"}, + "1", + ), + ( + {"description": "Additional detail"}, + {"description": "Additional detail"}, + "1", + ), + ], +) async def test_update_item( hass: HomeAssistant, setup_integration: None, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + item_data: dict[str, Any], + expected_item_data: dict[str, Any], + expected_state: str, ) -> None: """Test updating a todo item.""" @@ -220,21 +258,70 @@ async def test_update_item( await hass.services.async_call( TODO_DOMAIN, "update_item", - {"item": item["uid"], "status": "completed"}, + {"item": item["uid"], **item_data}, target={"entity_id": TEST_ENTITY}, blocking=True, ) - # Verify item is marked as completed + # Verify item is updated items = await ws_get_items() assert len(items) == 1 item = items[0] assert item["summary"] == "soda" - assert item["status"] == "completed" + for k, v in expected_item_data.items(): + assert items[0][k] == v state = hass.states.get(TEST_ENTITY) assert state - assert state.state == "0" + assert state.state == expected_state + + +async def test_rename( + hass: HomeAssistant, + setup_integration: None, + ws_get_items: Callable[[], Awaitable[dict[str, str]]], +) -> None: + """Test renaming a todo item.""" + + # Create new item + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Fetch item + items = await ws_get_items() + assert len(items) == 1 + item = items[0] + assert item["summary"] == "soda" + assert item["status"] == "needs_action" + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "1" + + # Rename item + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": item["uid"], "rename": "water"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Verify item has been renamed + items = await ws_get_items() + assert len(items) == 1 + item = items[0] + assert 
item["summary"] == "water" + assert item["status"] == "needs_action" + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == "1" @pytest.mark.parametrize( @@ -418,3 +505,64 @@ async def test_parse_existing_ics( state = hass.states.get(TEST_ENTITY) assert state assert state.state == expected_state + + +async def test_susbcribe( + hass: HomeAssistant, + setup_integration: None, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test subscribing to item updates.""" + + # Create new item + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + {"item": "soda"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Subscribe and get the initial list + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": TEST_ENTITY, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "soda" + assert items[0]["status"] == "needs_action" + uid = items[0]["uid"] + assert uid + + # Rename item + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": uid, "rename": "milk"}, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Verify update is published + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "milk" + assert items[0]["status"] == "needs_action" + assert "uid" in items[0] diff --git a/tests/components/lock/test_init.py b/tests/components/lock/test_init.py index 24b13d48a1e..16f40fda786 100644 --- a/tests/components/lock/test_init.py +++ b/tests/components/lock/test_init.py @@ -42,6 +42,8 @@ 
class MockLockEntity(LockEntity): ) -> None: """Initialize mock lock entity.""" self._attr_supported_features = supported_features + self.calls_lock = MagicMock() + self.calls_unlock = MagicMock() self.calls_open = MagicMock() if code_format is not None: self._attr_code_format = code_format @@ -49,11 +51,13 @@ class MockLockEntity(LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" + self.calls_lock(kwargs) self._attr_is_locking = False self._attr_is_locked = True async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" + self.calls_unlock(kwargs) self._attr_is_unlocking = False self._attr_is_locked = False @@ -103,10 +107,10 @@ async def test_lock_states(hass: HomeAssistant) -> None: async def test_set_default_code_option( hass: HomeAssistant, + entity_registry: er.EntityRegistry, enable_custom_integrations: None, ) -> None: """Test default code stored in the registry.""" - entity_registry = er.async_get(hass) entry = entity_registry.async_get_or_create("lock", "test", "very_unique") await hass.async_block_till_done() @@ -134,10 +138,10 @@ async def test_set_default_code_option( async def test_default_code_option_update( hass: HomeAssistant, + entity_registry: er.EntityRegistry, enable_custom_integrations: None, ) -> None: """Test default code stored in the registry is updated.""" - entity_registry = er.async_get(hass) entry = entity_registry.async_get_or_create("lock", "test", "very_unique") await hass.async_block_till_done() @@ -232,6 +236,50 @@ async def test_lock_unlock_with_code(hass: HomeAssistant) -> None: assert not lock.is_locked +async def test_lock_with_illegal_code(hass: HomeAssistant) -> None: + """Test lock entity with default code that does not match the code format.""" + lock = MockLockEntity( + code_format=r"^\d{4}$", + supported_features=LockEntityFeature.OPEN, + ) + lock.hass = hass + + with pytest.raises(ValueError): + await _async_open( + lock, ServiceCall(DOMAIN, SERVICE_OPEN, {ATTR_CODE: 
"123456"}) + ) + with pytest.raises(ValueError): + await _async_lock( + lock, ServiceCall(DOMAIN, SERVICE_LOCK, {ATTR_CODE: "123456"}) + ) + with pytest.raises(ValueError): + await _async_unlock( + lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {ATTR_CODE: "123456"}) + ) + + +async def test_lock_with_no_code(hass: HomeAssistant) -> None: + """Test lock entity with default code that does not match the code format.""" + lock = MockLockEntity( + supported_features=LockEntityFeature.OPEN, + ) + lock.hass = hass + + await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {})) + lock.calls_open.assert_called_with({}) + await _async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {})) + lock.calls_lock.assert_called_with({}) + await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {})) + lock.calls_unlock.assert_called_with({}) + + await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {ATTR_CODE: ""})) + lock.calls_open.assert_called_with({}) + await _async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {ATTR_CODE: ""})) + lock.calls_lock.assert_called_with({}) + await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {ATTR_CODE: ""})) + lock.calls_unlock.assert_called_with({}) + + async def test_lock_with_default_code(hass: HomeAssistant) -> None: """Test lock entity with default code.""" lock = MockLockEntity( @@ -245,5 +293,52 @@ async def test_lock_with_default_code(hass: HomeAssistant) -> None: assert lock._lock_option_default_code == "1234" await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {})) + lock.calls_open.assert_called_with({ATTR_CODE: "1234"}) await _async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {})) + lock.calls_lock.assert_called_with({ATTR_CODE: "1234"}) await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {})) + lock.calls_unlock.assert_called_with({ATTR_CODE: "1234"}) + + await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {ATTR_CODE: ""})) + lock.calls_open.assert_called_with({ATTR_CODE: "1234"}) + await 
_async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {ATTR_CODE: ""})) + lock.calls_lock.assert_called_with({ATTR_CODE: "1234"}) + await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {ATTR_CODE: ""})) + lock.calls_unlock.assert_called_with({ATTR_CODE: "1234"}) + + +async def test_lock_with_provided_and_default_code(hass: HomeAssistant) -> None: + """Test lock entity with provided code when default code is set.""" + lock = MockLockEntity( + code_format=r"^\d{4}$", + supported_features=LockEntityFeature.OPEN, + lock_option_default_code="1234", + ) + lock.hass = hass + + await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {ATTR_CODE: "4321"})) + lock.calls_open.assert_called_with({ATTR_CODE: "4321"}) + await _async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {ATTR_CODE: "4321"})) + lock.calls_lock.assert_called_with({ATTR_CODE: "4321"}) + await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {ATTR_CODE: "4321"})) + lock.calls_unlock.assert_called_with({ATTR_CODE: "4321"}) + + +async def test_lock_with_illegal_default_code(hass: HomeAssistant) -> None: + """Test lock entity with default code that does not match the code format.""" + lock = MockLockEntity( + code_format=r"^\d{4}$", + supported_features=LockEntityFeature.OPEN, + lock_option_default_code="123456", + ) + lock.hass = hass + + assert lock.state_attributes == {"code_format": r"^\d{4}$"} + assert lock._lock_option_default_code == "123456" + + with pytest.raises(ValueError): + await _async_open(lock, ServiceCall(DOMAIN, SERVICE_OPEN, {})) + with pytest.raises(ValueError): + await _async_lock(lock, ServiceCall(DOMAIN, SERVICE_LOCK, {})) + with pytest.raises(ValueError): + await _async_unlock(lock, ServiceCall(DOMAIN, SERVICE_UNLOCK, {})) diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index eaa2a1e4192..d95b409a67b 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -493,9 +493,13 @@ async def 
test_logbook_describe_event( hass, "fake_integration.logbook", Mock( - async_describe_events=lambda hass, async_describe_event: async_describe_event( - "test_domain", "some_event", _describe - ) + async_describe_events=( + lambda hass, async_describe_event: async_describe_event( + "test_domain", + "some_event", + _describe, + ) + ), ), ) diff --git a/tests/components/luftdaten/test_sensor.py b/tests/components/luftdaten/test_sensor.py index e9e86fd9f1b..7a2cac1721b 100644 --- a/tests/components/luftdaten/test_sensor.py +++ b/tests/components/luftdaten/test_sensor.py @@ -23,11 +23,11 @@ from tests.common import MockConfigEntry async def test_luftdaten_sensors( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, init_integration: MockConfigEntry, ) -> None: """Test the Luftdaten sensors.""" - entity_registry = er.async_get(hass) - device_registry = dr.async_get(hass) entry = entity_registry.async_get("sensor.sensor_12345_temperature") assert entry diff --git a/tests/components/lutron_caseta/test_button.py b/tests/components/lutron_caseta/test_button.py index 68742e5bae3..378db23715c 100644 --- a/tests/components/lutron_caseta/test_button.py +++ b/tests/components/lutron_caseta/test_button.py @@ -8,7 +8,9 @@ from homeassistant.helpers import entity_registry as er from . 
import MockBridge, async_setup_integration -async def test_button_unique_id(hass: HomeAssistant) -> None: +async def test_button_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test a button unique id.""" await async_setup_integration(hass, MockBridge) @@ -17,8 +19,6 @@ async def test_button_unique_id(hass: HomeAssistant) -> None: ) caseta_button_entity_id = "button.dining_room_pico_stop" - entity_registry = er.async_get(hass) - # Assert that Caseta buttons will have the bridge serial hash and the zone id as the uniqueID assert entity_registry.async_get(ra3_button_entity_id).unique_id == "000004d2_1372" assert ( diff --git a/tests/components/lutron_caseta/test_config_flow.py b/tests/components/lutron_caseta/test_config_flow.py index da26a55a4ef..631cb0ff1e7 100644 --- a/tests/components/lutron_caseta/test_config_flow.py +++ b/tests/components/lutron_caseta/test_config_flow.py @@ -60,7 +60,8 @@ async def test_bridge_import_flow(hass: HomeAssistant) -> None: ) as mock_setup_entry, patch( "homeassistant.components.lutron_caseta.async_setup", return_value=True ), patch.object( - Smartbridge, "create_tls" + Smartbridge, + "create_tls", ) as create_tls: create_tls.return_value = MockBridge(can_connect=True) diff --git a/tests/components/lutron_caseta/test_cover.py b/tests/components/lutron_caseta/test_cover.py index ef5fc2a5228..7fe8ed22866 100644 --- a/tests/components/lutron_caseta/test_cover.py +++ b/tests/components/lutron_caseta/test_cover.py @@ -7,13 +7,13 @@ from homeassistant.helpers import entity_registry as er from . 
import MockBridge, async_setup_integration -async def test_cover_unique_id(hass: HomeAssistant) -> None: +async def test_cover_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test a light unique id.""" await async_setup_integration(hass, MockBridge) cover_entity_id = "cover.basement_bedroom_left_shade" - entity_registry = er.async_get(hass) - # Assert that Caseta covers will have the bridge serial hash and the zone id as the uniqueID assert entity_registry.async_get(cover_entity_id).unique_id == "000004d2_802" diff --git a/tests/components/lutron_caseta/test_fan.py b/tests/components/lutron_caseta/test_fan.py index f9c86cc9c58..0147817514d 100644 --- a/tests/components/lutron_caseta/test_fan.py +++ b/tests/components/lutron_caseta/test_fan.py @@ -7,13 +7,13 @@ from homeassistant.helpers import entity_registry as er from . import MockBridge, async_setup_integration -async def test_fan_unique_id(hass: HomeAssistant) -> None: +async def test_fan_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test a light unique id.""" await async_setup_integration(hass, MockBridge) fan_entity_id = "fan.master_bedroom_ceiling_fan" - entity_registry = er.async_get(hass) - # Assert that Caseta covers will have the bridge serial hash and the zone id as the uniqueID assert entity_registry.async_get(fan_entity_id).unique_id == "000004d2_804" diff --git a/tests/components/lutron_caseta/test_light.py b/tests/components/lutron_caseta/test_light.py index 6449ce04832..cdba9a956e5 100644 --- a/tests/components/lutron_caseta/test_light.py +++ b/tests/components/lutron_caseta/test_light.py @@ -8,15 +8,15 @@ from homeassistant.helpers import entity_registry as er from . 
import MockBridge, async_setup_integration -async def test_light_unique_id(hass: HomeAssistant) -> None: +async def test_light_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test a light unique id.""" await async_setup_integration(hass, MockBridge) ra3_entity_id = "light.basement_bedroom_main_lights" caseta_entity_id = "light.kitchen_main_lights" - entity_registry = er.async_get(hass) - # Assert that RA3 lights will have the bridge serial hash and the zone id as the uniqueID assert entity_registry.async_get(ra3_entity_id).unique_id == "000004d2_801" diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index 8390370d16d..c0bac43ba6f 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -82,7 +82,7 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: async def test_humanify_lutron_caseta_button_event_integration_not_loaded( - hass: HomeAssistant, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Test humanifying lutron_caseta_button_events when the integration fails to load.""" hass.config.components.add("recorder") @@ -109,7 +109,6 @@ async def test_humanify_lutron_caseta_button_event_integration_not_loaded( await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() - device_registry = dr.async_get(hass) for device in device_registry.devices.values(): if device.config_entries == {config_entry.entry_id}: dr_device_id = device.id @@ -140,14 +139,15 @@ async def test_humanify_lutron_caseta_button_event_integration_not_loaded( assert event1["message"] == "press stop" -async def test_humanify_lutron_caseta_button_event_ra3(hass: HomeAssistant) -> None: +async def test_humanify_lutron_caseta_button_event_ra3( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test humanifying lutron_caseta_button_events from 
an RA3 hub.""" hass.config.components.add("recorder") assert await async_setup_component(hass, "logbook", {}) await async_setup_integration(hass, MockBridge) - registry = dr.async_get(hass) - keypad = registry.async_get_device( + keypad = device_registry.async_get_device( identifiers={(DOMAIN, 66286451)}, connections=set() ) assert keypad @@ -176,14 +176,15 @@ async def test_humanify_lutron_caseta_button_event_ra3(hass: HomeAssistant) -> N assert event1["message"] == "press Kitchen Pendants" -async def test_humanify_lutron_caseta_button_unknown_type(hass: HomeAssistant) -> None: +async def test_humanify_lutron_caseta_button_unknown_type( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test humanifying lutron_caseta_button_events with an unknown type.""" hass.config.components.add("recorder") assert await async_setup_component(hass, "logbook", {}) await async_setup_integration(hass, MockBridge) - registry = dr.async_get(hass) - keypad = registry.async_get_device( + keypad = device_registry.async_get_device( identifiers={(DOMAIN, 66286451)}, connections=set() ) assert keypad diff --git a/tests/components/lutron_caseta/test_switch.py b/tests/components/lutron_caseta/test_switch.py index 842aca94423..c38305ec26b 100644 --- a/tests/components/lutron_caseta/test_switch.py +++ b/tests/components/lutron_caseta/test_switch.py @@ -6,13 +6,13 @@ from homeassistant.helpers import entity_registry as er from . 
import MockBridge, async_setup_integration -async def test_switch_unique_id(hass: HomeAssistant) -> None: +async def test_switch_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test a light unique id.""" await async_setup_integration(hass, MockBridge) switch_entity_id = "switch.basement_bathroom_exhaust_fan" - entity_registry = er.async_get(hass) - # Assert that Caseta covers will have the bridge serial hash and the zone id as the uniqueID assert entity_registry.async_get(switch_entity_id).unique_id == "000004d2_803" diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index 53477792e43..f591709fbda 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -40,11 +40,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -76,8 +76,8 @@ "0/40/17": true, "0/40/18": "869D5F986B588B29", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -122,8 +122,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -155,14 +155,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "YFX5V0js", - "IPv4Addresses": ["wKgBIw=="], - "IPv6Addresses": ["/oAAAAAAAABiVfn//ldI7A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": 
"YFX5V0js", + "5": ["wKgBIw=="], + "6": ["/oAAAAAAAABiVfn//ldI7A=="], + "7": 1 } ], "0/51/1": 3, @@ -503,19 +503,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", - "fabricIndex": 1 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 5, - "label": "", - "fabricIndex": 1 + "1": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", + "2": 65521, + "3": 1, + "4": 5, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -540,20 +540,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 
65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, diff --git a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 3c5b82ad5b8..c85ee4d70e3 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -14,6 +14,7 @@ "node_id": 5, "date_commissioned": "2023-01-16T21:07:57.508440", "last_interview": "2023-01-16T21:07:57.508448", + "last_subscription_attempt": 0, "interview_version": 2, "attributes": { "0/4/0": 128, @@ -41,11 +42,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -77,8 +78,8 @@ "0/40/17": true, "0/40/18": "869D5F986B588B29", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -123,8 +124,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -156,14 +157,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "YFX5V0js", - "IPv4Addresses": ["wKgBIw=="], - "IPv6Addresses": ["/oAAAAAAAABiVfn//ldI7A=="], - "type": 1 + "0": 
"WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "YFX5V0js", + "5": ["wKgBIw=="], + "6": ["/oAAAAAAAABiVfn//ldI7A=="], + "7": 1 } ], "0/51/1": 3, @@ -316,19 +317,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", - "fabricIndex": 1 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 5, - "label": "", - "fabricIndex": 1 + "1": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", + "2": 65521, + "3": 1, + "4": 5, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -353,20 
+354,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, diff --git a/tests/components/matter/fixtures/nodes/color-temperature-light.json b/tests/components/matter/fixtures/nodes/color-temperature-light.json index 7552fa833fb..45d1c18635c 100644 --- a/tests/components/matter/fixtures/nodes/color-temperature-light.json +++ b/tests/components/matter/fixtures/nodes/color-temperature-light.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63], @@ -20,11 +20,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 52 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 52 } ], "0/31/1": [], @@ -50,8 +50,8 @@ "0/40/17": true, "0/40/18": "mock-color-temperature-light", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 65535 + "0": 3, + "1": 65535 }, "0/40/65532": 0, "0/40/65533": 1, @@ -63,8 +63,8 @@ ], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 2, "0/48/3": 2, @@ -77,8 +77,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "ZXRoMA==", - "connected": true + "0": "ZXRoMA==", + "1": true } ], "0/49/4": true, @@ -92,38 +92,38 @@ "0/49/65531": [0, 1, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "eth1", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "ABeILIy4", - "IPv4Addresses": 
["CjwBuw=="], - "IPv6Addresses": [ + "0": "eth1", + "1": true, + "2": null, + "3": null, + "4": "ABeILIy4", + "5": ["CjwBuw=="], + "6": [ "/VqgxiAxQiYCF4j//iyMuA==", "IAEEcLs7AAYCF4j//iyMuA==", "/oAAAAAAAAACF4j//iyMuA==" ], - "type": 0 + "7": 0 }, { - "name": "eth0", - "isOperational": false, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAN/ESDO", - "IPv4Addresses": [], - "IPv6Addresses": [], - "type": 2 + "0": "eth0", + "1": false, + "2": null, + "3": null, + "4": "AAN/ESDO", + "5": [], + "6": [], + "7": 2 }, { - "name": "lo", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAAAAAAA", - "IPv4Addresses": ["fwAAAQ=="], - "IPv6Addresses": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "type": 0 + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 } ], "0/51/1": 4, @@ -151,19 +151,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEEYGMAhVV+Adasucgyi++1D7eyBIfHs9xLKJPVJqJdMAqt0S8lQs+6v/NAyAVXsN8jdGlNgZQENRnfqC2gXv3COzcKNQEoARgkAgE2AwQCBAEYMAQUTK/GvAzp9yCT0ihFRaEyW8KuO0IwBRQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtBgwC0CaO1hqAR9PQJUkSx4MQyHEDQND/3j7m6EPRImPCA53dKI7e4w7xZEQEW95oMhuUobdy3WbMcggAMTX46ninwqUGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEqboEMvSYpJHvrznp5AQ1fHW0AVUrTajBHZ/2uba7+FTyPb+fqgf6K1zbuMqTxTOA/FwjzAL7hQTwG+HNnmLwNTcKNQEpARgkAmAwBBQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtDAFFG02YRl97W++GsAiEiBzIhO0hzA6GDALQBl+ZyFbSXu3oXVJGBjtDcpwOCRC30OaVjDhUT7NbohDLaKuwxMhAgE+uHtSLKRZPGlQGSzYdnDGj/dWolGE+n4Y", - "fabricIndex": 52 + "1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEEYGMAhVV+Adasucgyi++1D7eyBIfHs9xLKJPVJqJdMAqt0S8lQs+6v/NAyAVXsN8jdGlNgZQENRnfqC2gXv3COzcKNQEoARgkAgE2AwQCBAEYMAQUTK/GvAzp9yCT0ihFRaEyW8KuO0IwBRQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtBgwC0CaO1hqAR9PQJUkSx4MQyHEDQND/3j7m6EPRImPCA53dKI7e4w7xZEQEW95oMhuUobdy3WbMcggAMTX46ninwqUGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEqboEMvSYpJHvrznp5AQ1fHW0AVUrTajBHZ/2uba7+FTyPb+fqgf6K1zbuMqTxTOA/FwjzAL7hQTwG+HNnmLwNTcKNQEpARgkAmAwBBQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtDAFFG02YRl97W++GsAiEiBzIhO0hzA6GDALQBl+ZyFbSXu3oXVJGBjtDcpwOCRC30OaVjDhUT7NbohDLaKuwxMhAgE+uHtSLKRZPGlQGSzYdnDGj/dWolGE+n4Y", + "254": 52 } ], "0/62/1": [ { - "rootPublicKey": "BOI8+YJvCUh78+5WD4aHD7t1HQJS3WMrCEknk6n+5HXP2VRMB3SvK6+EEa8rR6UkHnCryIREeOmS0XYozzHjTQg=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 52 + "1": "BOI8+YJvCUh78+5WD4aHD7t1HQJS3WMrCEknk6n+5HXP2VRMB3SvK6+EEa8rR6UkHnCryIREeOmS0XYozzHjTQg=", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 52 } ], "0/62/2": 16, @@ -202,8 +202,8 @@ ], "1/29/0": [ { - "deviceType": 268, - "revision": 1 + "0": 268, + "1": 1 } ], "1/29/1": [6, 29, 57, 768, 8, 80, 3, 4], @@ -277,19 +277,19 @@ "1/80/1": 0, "1/80/2": [ { - "label": "Dark", - "mode": 0, - "semanticTags": [] + "0": "Dark", + "1": 0, + "2": [] }, { - "label": "Medium", - "mode": 1, - "semanticTags": [] + "0": "Medium", + "1": 1, + "2": [] }, { - "label": "Light", - "mode": 2, - "semanticTags": [] + "0": "Light", + "1": 2, + "2": [] } ], "1/80/3": 0, diff --git a/tests/components/matter/fixtures/nodes/device_diagnostics.json b/tests/components/matter/fixtures/nodes/device_diagnostics.json index 4b834cd9090..d95fbe5efa9 100644 --- a/tests/components/matter/fixtures/nodes/device_diagnostics.json +++ b/tests/components/matter/fixtures/nodes/device_diagnostics.json @@ -3,6 +3,7 @@ "date_commissioned": "2023-01-16T21:07:57.508440", "last_interview": "2023-01-16T21:07:57.508448", "interview_version": 2, + 
"last_subscription_attempt": 0, "attributes": { "0/4/0": 128, "0/4/65532": 1, @@ -12,8 +13,8 @@ "0/4/65531": [0, 65528, 65529, 65531, 65532, 65533], "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -29,11 +30,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -65,8 +66,8 @@ "0/40/17": true, "0/40/18": "869D5F986B588B29", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -111,8 +112,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -142,14 +143,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "YFX5V0js", - "IPv4Addresses": ["wKgBIw=="], - "IPv6Addresses": ["/oAAAAAAAABiVfn//ldI7A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "YFX5V0js", + "5": ["wKgBIw=="], + "6": ["/oAAAAAAAABiVfn//ldI7A=="], + "7": 1 } ], "0/51/1": 3, @@ -301,19 +302,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", - "icac": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", - "fabricIndex": 1 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBRgkBwEkCAEwCUEEELwf3lni0ez0mRGa/z9gFtuTfn3Gpnsq/rBvQmpgjxqgC0RNcZmHfAm176H0j6ENQrnc1RhkKA5qiJtEgzQF4DcKNQEoARgkAgE2AwQCBAEYMAQURdGBtNYpheXbKDo2Od5OLDCytacwBRQc+rrVsNzRFL1V9i4OFnGKrwIajRgwC0AG9mdYqL5WJ0jKIBcEzeWQbo8xg6sFv0ANmq0KSpMbfqVvw8Y39XEOQ6B8v+JCXSGMpdPC0nbVQKuv/pKUvJoTGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEWYzjmQq/3zCbWfMKR0asASVnOBOkNAzdwdW1X6sC0zA5m3DhGRMEff09ZqHDZi/o6CW+I+rEGNEyW+00/M84azcKNQEpARgkAmAwBBQc+rrVsNzRFL1V9i4OFnGKrwIajTAFFI6CuLTopCFiBYeGuUcP8Ak5Jo3gGDALQDYMHSAwxZPP4TFqIGot2vm5+Wir58quxbojkWwyT9l8eat6f9sJmjTZ0VLggTwAWvY+IVm82YuMzTPxmkNWxVIY", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 5, - "label": "", - "fabricIndex": 1 + "1": "BALNCzn2XOp1NrwszT+LOLYT+tM76+Pob8AIOFl9+0UWFsLp4ZHUainZZMJQIAHxv39srVUYW0+nacFcjHTzNHw=", + "2": 65521, + "3": 1, + "4": 5, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -338,20 +339,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -414,8 +415,8 @@ ], "1/29/0": [ { - "deviceType": 257, - "revision": 1 + "0": 257, + "1": 1 } ], "1/29/1": [3, 4, 6, 8, 29, 768, 1030], diff --git 
a/tests/components/matter/fixtures/nodes/dimmable-light.json b/tests/components/matter/fixtures/nodes/dimmable-light.json index e14c922857c..7ccc3eef3af 100644 --- a/tests/components/matter/fixtures/nodes/dimmable-light.json +++ b/tests/components/matter/fixtures/nodes/dimmable-light.json @@ -12,8 +12,8 @@ "0/4/65531": [0, 65528, 65529, 65531, 65532, 65533], "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -29,11 +29,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -65,8 +65,8 @@ "0/40/17": true, "0/40/18": "mock-dimmable-light", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -111,8 +111,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -125,8 +125,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "", - "connected": true + "0": "", + "1": true } ], "0/49/2": 10, @@ -147,14 +147,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "", - "IPv4Addresses": [], - "IPv6Addresses": [], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "", + "5": [], + "6": [], + "7": 1 } ], "0/51/1": 6, @@ -243,19 +243,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "", - "icac": "", - "fabricIndex": 1 + "1": "", + "2": "", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - 
"fabricIndex": 1 + "1": "", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -278,20 +278,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -354,8 +354,8 @@ ], "1/29/0": [ { - "deviceType": 257, - "revision": 1 + "0": 257, + "1": 1 } ], "1/29/1": [3, 4, 6, 8, 29, 768, 1030], diff --git a/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json b/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json index 6cbd75ab09c..dfa7794f28b 100644 --- a/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json +++ b/tests/components/matter/fixtures/nodes/door-lock-with-unbolt.json @@ -7,8 +7,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -24,11 +24,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -60,8 +60,8 @@ "0/40/17": true, "0/40/18": "mock-door-lock", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 65535 + "0": 3, + "1": 65535 }, "0/40/65532": 0, "0/40/65533": 1, @@ -121,8 +121,8 @@ "0/47/65531": [0, 1, 2, 6, 65528, 65529, 65530, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 2, @@ -154,28 +154,28 @@ "0/50/65531": [65528, 65529, 65530, 65531, 65532, 65533], "0/51/0": [ { - "name": "eth0", - "isOperational": true, - 
"offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "/mQDt/2Q", - "IPv4Addresses": ["CjwBaQ=="], - "IPv6Addresses": [ + "0": "eth0", + "1": true, + "2": null, + "3": null, + "4": "/mQDt/2Q", + "5": ["CjwBaQ=="], + "6": [ "/VqgxiAxQib8ZAP//rf9kA==", "IAEEcLs7AAb8ZAP//rf9kA==", "/oAAAAAAAAD8ZAP//rf9kA==" ], - "type": 2 + "7": 2 }, { - "name": "lo", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAAAAAAA", - "IPv4Addresses": ["fwAAAQ=="], - "IPv6Addresses": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "type": 0 + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 } ], "0/51/1": 1, @@ -195,39 +195,39 @@ ], "0/52/0": [ { - "id": 26957, - "name": "26957", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26957, + "1": "26957", + "2": null, + "3": null, + "4": null }, { - "id": 26956, - "name": "26956", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26956, + "1": "26956", + "2": null, + "3": null, + "4": null }, { - "id": 26955, - "name": "26955", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26955, + "1": "26955", + "2": null, + "3": null, + "4": null }, { - "id": 26953, - "name": "26953", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26953, + "1": "26953", + "2": null, + "3": null, + "4": null }, { - "id": 26952, - "name": "26952", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26952, + "1": "26952", + "2": null, + "3": null, + "4": null } ], "0/52/1": 351120, @@ -358,19 +358,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], "0/62/0": [ { - "noc": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE55h6CbNLPZH/uM3/rDdA+jeuuD2QSPN8gBeEB0bmGJqWz/gCT4/ySB77rK3XiwVWVAmJhJ/eMcTIA0XXWMqKPDcKNQEoARgkAgE2AwQCBAEYMAQUqnKiC76YFhcTHt4AQ/kAbtrZ2MowBRSL6EWyWm8+uC0Puc2/BncMqYbpmhgwC0AA05Z+y1mcyHUeOFJ5kyDJJMN/oNCwN5h8UpYN/868iuQArr180/fbaN1+db9lab4D2lf0HK7wgHIR3HsOa2w9GA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE5R1DrUQE/L8tx95WR1g1dZJf4d+6LEB7JAYZN/nw9ZBUg5VOHDrB1xIw5KguYJzt10K+0KqQBBEbuwW+wLLobTcKNQEpARgkAmAwBBSL6EWyWm8+uC0Puc2/BncMqYbpmjAFFM0I6fPFzfOv2IWbX1huxb3eW0fqGDALQHXLE0TgIDW6XOnvtsOJCyKoENts8d4TQWBgTKviv1LF/+MS9eFYi+kO+1Idq5mVgwN+lH7eyecShQR0iqq6WLUY", - "fabricIndex": 1 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE55h6CbNLPZH/uM3/rDdA+jeuuD2QSPN8gBeEB0bmGJqWz/gCT4/ySB77rK3XiwVWVAmJhJ/eMcTIA0XXWMqKPDcKNQEoARgkAgE2AwQCBAEYMAQUqnKiC76YFhcTHt4AQ/kAbtrZ2MowBRSL6EWyWm8+uC0Puc2/BncMqYbpmhgwC0AA05Z+y1mcyHUeOFJ5kyDJJMN/oNCwN5h8UpYN/868iuQArr180/fbaN1+db9lab4D2lf0HK7wgHIR3HsOa2w9GA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE5R1DrUQE/L8tx95WR1g1dZJf4d+6LEB7JAYZN/nw9ZBUg5VOHDrB1xIw5KguYJzt10K+0KqQBBEbuwW+wLLobTcKNQEpARgkAmAwBBSL6EWyWm8+uC0Puc2/BncMqYbpmjAFFM0I6fPFzfOv2IWbX1huxb3eW0fqGDALQHXLE0TgIDW6XOnvtsOJCyKoENts8d4TQWBgTKviv1LF/+MS9eFYi+kO+1Idq5mVgwN+lH7eyecShQR0iqq6WLUY", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "BJ/jL2MdDrdq9TahKSa5c/dBc166NRCU0W9l7hK2kcuVtN915DLqiS+RAJ2iPEvWK5FawZHF/QdKLZmTkZHudxY=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 1 + "1": "BJ/jL2MdDrdq9TahKSa5c/dBc166NRCU0W9l7hK2kcuVtN915DLqiS+RAJ2iPEvWK5FawZHF/QdKLZmTkZHudxY=", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 16, @@ -395,20 +395,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { 
- "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -443,8 +443,8 @@ ], "1/29/0": [ { - "deviceType": 10, - "revision": 1 + "0": 10, + "1": 1 } ], "1/29/1": [3, 6, 29, 47, 257], diff --git a/tests/components/matter/fixtures/nodes/door-lock.json b/tests/components/matter/fixtures/nodes/door-lock.json index 1477d78aa67..8a3f0fd68dd 100644 --- a/tests/components/matter/fixtures/nodes/door-lock.json +++ b/tests/components/matter/fixtures/nodes/door-lock.json @@ -7,8 +7,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -24,11 +24,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -60,8 +60,8 @@ "0/40/17": true, "0/40/18": "mock-door-lock", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 65535 + "0": 3, + "1": 65535 }, "0/40/65532": 0, "0/40/65533": 1, @@ -121,8 +121,8 @@ "0/47/65531": [0, 1, 2, 6, 65528, 65529, 65530, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 2, @@ -154,28 +154,28 @@ "0/50/65531": [65528, 65529, 65530, 65531, 65532, 65533], "0/51/0": [ { - "name": "eth0", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "/mQDt/2Q", - "IPv4Addresses": ["CjwBaQ=="], - "IPv6Addresses": [ + "0": "eth0", + "1": true, + "2": null, + "3": null, + "4": "/mQDt/2Q", + "5": ["CjwBaQ=="], + "6": [ "/VqgxiAxQib8ZAP//rf9kA==", "IAEEcLs7AAb8ZAP//rf9kA==", "/oAAAAAAAAD8ZAP//rf9kA==" ], - "type": 2 + "7": 2 }, { - "name": "lo", - "isOperational": true, - 
"offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAAAAAAA", - "IPv4Addresses": ["fwAAAQ=="], - "IPv6Addresses": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "type": 0 + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 } ], "0/51/1": 1, @@ -195,39 +195,39 @@ ], "0/52/0": [ { - "id": 26957, - "name": "26957", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26957, + "1": "26957", + "2": null, + "3": null, + "4": null }, { - "id": 26956, - "name": "26956", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26956, + "1": "26956", + "2": null, + "3": null, + "4": null }, { - "id": 26955, - "name": "26955", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26955, + "1": "26955", + "2": null, + "3": null, + "4": null }, { - "id": 26953, - "name": "26953", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26953, + "1": "26953", + "2": null, + "3": null, + "4": null }, { - "id": 26952, - "name": "26952", - "stackFreeCurrent": null, - "stackFreeMinimum": null, - "stackSize": null + "0": 26952, + "1": "26952", + "2": null, + "3": null, + "4": null } ], "0/52/1": 351120, @@ -358,19 +358,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE55h6CbNLPZH/uM3/rDdA+jeuuD2QSPN8gBeEB0bmGJqWz/gCT4/ySB77rK3XiwVWVAmJhJ/eMcTIA0XXWMqKPDcKNQEoARgkAgE2AwQCBAEYMAQUqnKiC76YFhcTHt4AQ/kAbtrZ2MowBRSL6EWyWm8+uC0Puc2/BncMqYbpmhgwC0AA05Z+y1mcyHUeOFJ5kyDJJMN/oNCwN5h8UpYN/868iuQArr180/fbaN1+db9lab4D2lf0HK7wgHIR3HsOa2w9GA==", - "icac": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE5R1DrUQE/L8tx95WR1g1dZJf4d+6LEB7JAYZN/nw9ZBUg5VOHDrB1xIw5KguYJzt10K+0KqQBBEbuwW+wLLobTcKNQEpARgkAmAwBBSL6EWyWm8+uC0Puc2/BncMqYbpmjAFFM0I6fPFzfOv2IWbX1huxb3eW0fqGDALQHXLE0TgIDW6XOnvtsOJCyKoENts8d4TQWBgTKviv1LF/+MS9eFYi+kO+1Idq5mVgwN+lH7eyecShQR0iqq6WLUY", - "fabricIndex": 1 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE55h6CbNLPZH/uM3/rDdA+jeuuD2QSPN8gBeEB0bmGJqWz/gCT4/ySB77rK3XiwVWVAmJhJ/eMcTIA0XXWMqKPDcKNQEoARgkAgE2AwQCBAEYMAQUqnKiC76YFhcTHt4AQ/kAbtrZ2MowBRSL6EWyWm8+uC0Puc2/BncMqYbpmhgwC0AA05Z+y1mcyHUeOFJ5kyDJJMN/oNCwN5h8UpYN/868iuQArr180/fbaN1+db9lab4D2lf0HK7wgHIR3HsOa2w9GA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE5R1DrUQE/L8tx95WR1g1dZJf4d+6LEB7JAYZN/nw9ZBUg5VOHDrB1xIw5KguYJzt10K+0KqQBBEbuwW+wLLobTcKNQEpARgkAmAwBBSL6EWyWm8+uC0Puc2/BncMqYbpmjAFFM0I6fPFzfOv2IWbX1huxb3eW0fqGDALQHXLE0TgIDW6XOnvtsOJCyKoENts8d4TQWBgTKviv1LF/+MS9eFYi+kO+1Idq5mVgwN+lH7eyecShQR0iqq6WLUY", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "BJ/jL2MdDrdq9TahKSa5c/dBc166NRCU0W9l7hK2kcuVtN915DLqiS+RAJ2iPEvWK5FawZHF/QdKLZmTkZHudxY=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 1 + "1": "BJ/jL2MdDrdq9TahKSa5c/dBc166NRCU0W9l7hK2kcuVtN915DLqiS+RAJ2iPEvWK5FawZHF/QdKLZmTkZHudxY=", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 16, @@ -395,20 +395,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -443,8 +443,8 @@ ], "1/29/0": [ { - "deviceType": 10, - "revision": 1 + "0": 10, + "1": 1 } ], "1/29/1": [3, 6, 29, 47, 257], diff --git 
a/tests/components/matter/fixtures/nodes/eve-contact-sensor.json b/tests/components/matter/fixtures/nodes/eve-contact-sensor.json index b0eacfb621c..a009796f940 100644 --- a/tests/components/matter/fixtures/nodes/eve-contact-sensor.json +++ b/tests/components/matter/fixtures/nodes/eve-contact-sensor.json @@ -12,16 +12,16 @@ "0/53/47": 0, "0/53/8": [ { - "extAddress": 12872547289273451492, - "rloc16": 1024, - "routerId": 1, - "nextHop": 0, - "pathCost": 0, - "LQIIn": 3, - "LQIOut": 3, - "age": 142, - "allocated": true, - "linkEstablished": true + "0": 12872547289273451492, + "1": 1024, + "2": 1, + "3": 0, + "4": 0, + "5": 3, + "6": 3, + "7": 142, + "8": true, + "9": true } ], "0/53/29": 1556, @@ -30,20 +30,20 @@ "0/53/40": 519, "0/53/7": [ { - "extAddress": 12872547289273451492, - "age": 654, - "rloc16": 1024, - "linkFrameCounter": 738, - "mleFrameCounter": 418, - "lqi": 3, - "averageRssi": -50, - "lastRssi": -51, - "frameErrorRate": 5, - "messageErrorRate": 0, - "rxOnWhenIdle": true, - "fullThreadDevice": true, - "fullNetworkData": true, - "isChild": false + "0": 12872547289273451492, + "1": 654, + "2": 1024, + "3": 738, + "4": 418, + "5": 3, + "6": -50, + "7": -51, + "8": 5, + "9": 0, + "10": true, + "11": true, + "12": true, + "13": false } ], "0/53/33": 66, @@ -124,9 +124,9 @@ "0/53/16": 0, "0/42/0": [ { - "providerNodeID": 1773685588, - "endpoint": 0, - "fabricIndex": 1 + "1": 1773685588, + "2": 0, + "254": 1 } ], "0/42/65528": [], @@ -140,8 +140,8 @@ "0/48/65532": 0, "0/48/65528": [1, 3, 5], "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/4": true, "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], @@ -158,25 +158,25 @@ "0/31/1": [], "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - 
"fabricIndex": 2 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 2 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 3 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 } ], "0/31/65532": 0, @@ -187,8 +187,8 @@ "0/49/65533": 1, "0/49/1": [ { - "networkID": "Uv50lWMtT7s=", - "connected": true + "0": "Uv50lWMtT7s=", + "1": true } ], "0/49/3": 20, @@ -217,8 +217,8 @@ "0/29/65533": 1, "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 42, 46, 48, 49, 51, 53, 60, 62, 63], @@ -226,18 +226,18 @@ "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "ieee802154", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "YtmXHFJ/dhk=", - "IPv4Addresses": [], - "IPv6Addresses": [ + "0": "ieee802154", + "1": true, + "2": null, + "3": null, + "4": "YtmXHFJ/dhk=", + "5": [], + "6": [ "/RG+U41GAABynlpPU50e5g==", "/oAAAAAAAABg2ZccUn92GQ==", "/VL+dJVjAAB1cwmi02rvTA==" ], - "type": 4 + "7": 4 } ], "0/51/65529": [0], @@ -261,8 +261,8 @@ "0/40/6": "**REDACTED**", "0/40/3": "Eve Door", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/2": 4874, "0/40/65532": 0, @@ -302,8 +302,8 @@ "1/29/65533": 1, "1/29/0": [ { - "deviceType": 21, - "revision": 1 + "0": 21, + "1": 1 } ], "1/29/65528": [], diff --git a/tests/components/matter/fixtures/nodes/extended-color-light.json b/tests/components/matter/fixtures/nodes/extended-color-light.json index f4d83239b6d..d18b76768ca 100644 --- a/tests/components/matter/fixtures/nodes/extended-color-light.json +++ b/tests/components/matter/fixtures/nodes/extended-color-light.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63], @@ -20,11 +20,11 @@ "0/29/65531": [0, 1, 2, 
3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 52 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 52 } ], "0/31/1": [], @@ -50,8 +50,8 @@ "0/40/17": true, "0/40/18": "mock-extended-color-light", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 65535 + "0": 3, + "1": 65535 }, "0/40/65532": 0, "0/40/65533": 1, @@ -63,8 +63,8 @@ ], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 2, "0/48/3": 2, @@ -77,8 +77,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "ZXRoMA==", - "connected": true + "0": "ZXRoMA==", + "1": true } ], "0/49/4": true, @@ -92,38 +92,38 @@ "0/49/65531": [0, 1, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "eth1", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "ABeILIy4", - "IPv4Addresses": ["CjwBuw=="], - "IPv6Addresses": [ + "0": "eth1", + "1": true, + "2": null, + "3": null, + "4": "ABeILIy4", + "5": ["CjwBuw=="], + "6": [ "/VqgxiAxQiYCF4j//iyMuA==", "IAEEcLs7AAYCF4j//iyMuA==", "/oAAAAAAAAACF4j//iyMuA==" ], - "type": 0 + "7": 0 }, { - "name": "eth0", - "isOperational": false, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAN/ESDO", - "IPv4Addresses": [], - "IPv6Addresses": [], - "type": 2 + "0": "eth0", + "1": false, + "2": null, + "3": null, + "4": "AAN/ESDO", + "5": [], + "6": [], + "7": 2 }, { - "name": "lo", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "AAAAAAAA", - "IPv4Addresses": ["fwAAAQ=="], - "IPv6Addresses": ["AAAAAAAAAAAAAAAAAAAAAQ=="], - "type": 0 + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": 
["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 } ], "0/51/1": 4, @@ -151,19 +151,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEEYGMAhVV+Adasucgyi++1D7eyBIfHs9xLKJPVJqJdMAqt0S8lQs+6v/NAyAVXsN8jdGlNgZQENRnfqC2gXv3COzcKNQEoARgkAgE2AwQCBAEYMAQUTK/GvAzp9yCT0ihFRaEyW8KuO0IwBRQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtBgwC0CaO1hqAR9PQJUkSx4MQyHEDQND/3j7m6EPRImPCA53dKI7e4w7xZEQEW95oMhuUobdy3WbMcggAMTX46ninwqUGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEqboEMvSYpJHvrznp5AQ1fHW0AVUrTajBHZ/2uba7+FTyPb+fqgf6K1zbuMqTxTOA/FwjzAL7hQTwG+HNnmLwNTcKNQEpARgkAmAwBBQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtDAFFG02YRl97W++GsAiEiBzIhO0hzA6GDALQBl+ZyFbSXu3oXVJGBjtDcpwOCRC30OaVjDhUT7NbohDLaKuwxMhAgE+uHtSLKRZPGlQGSzYdnDGj/dWolGE+n4Y", - "fabricIndex": 52 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEEYGMAhVV+Adasucgyi++1D7eyBIfHs9xLKJPVJqJdMAqt0S8lQs+6v/NAyAVXsN8jdGlNgZQENRnfqC2gXv3COzcKNQEoARgkAgE2AwQCBAEYMAQUTK/GvAzp9yCT0ihFRaEyW8KuO0IwBRQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtBgwC0CaO1hqAR9PQJUkSx4MQyHEDQND/3j7m6EPRImPCA53dKI7e4w7xZEQEW95oMhuUobdy3WbMcggAMTX46ninwqUGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEqboEMvSYpJHvrznp5AQ1fHW0AVUrTajBHZ/2uba7+FTyPb+fqgf6K1zbuMqTxTOA/FwjzAL7hQTwG+HNnmLwNTcKNQEpARgkAmAwBBQ5RmCO0h/Cd/uv6Pe62ZSLBzXOtDAFFG02YRl97W++GsAiEiBzIhO0hzA6GDALQBl+ZyFbSXu3oXVJGBjtDcpwOCRC30OaVjDhUT7NbohDLaKuwxMhAgE+uHtSLKRZPGlQGSzYdnDGj/dWolGE+n4Y", + "254": 52 } ], "0/62/1": [ { - "rootPublicKey": "BOI8+YJvCUh78+5WD4aHD7t1HQJS3WMrCEknk6n+5HXP2VRMB3SvK6+EEa8rR6UkHnCryIREeOmS0XYozzHjTQg=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 52 + "1": "BOI8+YJvCUh78+5WD4aHD7t1HQJS3WMrCEknk6n+5HXP2VRMB3SvK6+EEa8rR6UkHnCryIREeOmS0XYozzHjTQg=", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 52 } ], "0/62/2": 16, @@ -202,8 +202,8 @@ ], "1/29/0": [ { - "deviceType": 269, - "revision": 1 + "0": 269, + "1": 1 } ], 
"1/29/1": [6, 29, 57, 768, 8, 80, 3, 4], @@ -277,19 +277,19 @@ "1/80/1": 0, "1/80/2": [ { - "label": "Dark", - "mode": 0, - "semanticTags": [] + "0": "Dark", + "1": 0, + "2": [] }, { - "label": "Medium", - "mode": 1, - "semanticTags": [] + "0": "Medium", + "1": 1, + "2": [] }, { - "label": "Light", - "mode": 2, - "semanticTags": [] + "0": "Light", + "1": 2, + "2": [] } ], "1/80/3": 0, diff --git a/tests/components/matter/fixtures/nodes/flow-sensor.json b/tests/components/matter/fixtures/nodes/flow-sensor.json index e1fc2a36585..a8dad202fa1 100644 --- a/tests/components/matter/fixtures/nodes/flow-sensor.json +++ b/tests/components/matter/fixtures/nodes/flow-sensor.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-flow-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 774, - "revision": 1 + "0": 774, + "1": 1 } ], "1/29/1": [6, 29, 57, 768, 8, 40], diff --git a/tests/components/matter/fixtures/nodes/generic-switch-multi.json b/tests/components/matter/fixtures/nodes/generic-switch-multi.json index 15c93825307..f564e91a1ce 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch-multi.json +++ b/tests/components/matter/fixtures/nodes/generic-switch-multi.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-generic-switch", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 15, - "revision": 1 + "0": 15, + "1": 1 } ], "1/29/1": [3, 29, 59], @@ 
-77,17 +77,16 @@ "1/59/65528": [], "1/64/0": [ { - "label": "Label", - "value": "1" + "0": "Label", + "1": "1" } ], - "2/3/65529": [0, 64], "2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "2/29/0": [ { - "deviceType": 15, - "revision": 1 + "0": 15, + "1": 1 } ], "2/29/1": [3, 29, 59], @@ -107,8 +106,8 @@ "2/59/65528": [], "2/64/0": [ { - "label": "Label", - "value": "Fancy Button" + "0": "Label", + "1": "Fancy Button" } ] }, diff --git a/tests/components/matter/fixtures/nodes/generic-switch.json b/tests/components/matter/fixtures/nodes/generic-switch.json index 30763c88e5b..80773915748 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch.json +++ b/tests/components/matter/fixtures/nodes/generic-switch.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-generic-switch", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 15, - "revision": 1 + "0": 15, + "1": 1 } ], "1/29/1": [3, 29, 59], diff --git a/tests/components/matter/fixtures/nodes/humidity-sensor.json b/tests/components/matter/fixtures/nodes/humidity-sensor.json index a1940fc1857..8220c9cf8f8 100644 --- a/tests/components/matter/fixtures/nodes/humidity-sensor.json +++ b/tests/components/matter/fixtures/nodes/humidity-sensor.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-humidity-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 775, - "revision": 1 + "0": 775, + "1": 1 } 
], "1/29/1": [6, 29, 57, 768, 8, 40], diff --git a/tests/components/matter/fixtures/nodes/light-sensor.json b/tests/components/matter/fixtures/nodes/light-sensor.json index 93583c34292..c4d84bc7923 100644 --- a/tests/components/matter/fixtures/nodes/light-sensor.json +++ b/tests/components/matter/fixtures/nodes/light-sensor.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-light-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 262, - "revision": 1 + "0": 262, + "1": 1 } ], "1/29/1": [6, 29, 57, 768, 8, 40], diff --git a/tests/components/matter/fixtures/nodes/occupancy-sensor.json b/tests/components/matter/fixtures/nodes/occupancy-sensor.json index d8f2580c2b0..f63dd43362b 100644 --- a/tests/components/matter/fixtures/nodes/occupancy-sensor.json +++ b/tests/components/matter/fixtures/nodes/occupancy-sensor.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-temperature-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -61,8 +61,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 263, - "revision": 1 + "0": 263, + "1": 1 } ], "1/29/1": [ diff --git a/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json b/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json index 43ba486bc29..8d523f5443a 100644 --- a/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json +++ b/tests/components/matter/fixtures/nodes/on-off-plugin-unit.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - 
"deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-onoff-plugin-unit", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -118,8 +118,8 @@ ], "1/29/0": [ { - "deviceType": 266, - "revision": 1 + "0": 266, + "1": 1 } ], "1/29/1": [ diff --git a/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json b/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json index f29361da128..3f6e83ca460 100644 --- a/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json +++ b/tests/components/matter/fixtures/nodes/onoff-light-alt-name.json @@ -29,11 +29,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -65,8 +65,8 @@ "0/40/17": true, "0/40/18": "mock-onoff-light", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -111,8 +111,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -125,8 +125,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "", - "connected": true + "0": "", + "1": true } ], "0/49/2": 10, @@ -147,14 +147,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "", - "IPv4Addresses": [""], - "IPv6Addresses": [], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "", + "5": [""], + "6": [], + "7": 1 } ], "0/51/1": 6, @@ -243,19 +243,19 @@ 
"0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "", - "icac": "", - "fabricIndex": 1 + "1": "", + "2": "", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 1 + "1": "", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -278,20 +278,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, diff --git a/tests/components/matter/fixtures/nodes/onoff-light-no-name.json b/tests/components/matter/fixtures/nodes/onoff-light-no-name.json index 8a1134409a9..18cb68c8926 100644 --- a/tests/components/matter/fixtures/nodes/onoff-light-no-name.json +++ b/tests/components/matter/fixtures/nodes/onoff-light-no-name.json @@ -29,11 +29,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -65,8 +65,8 @@ "0/40/17": true, "0/40/18": "mock-onoff-light", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -111,8 +111,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -125,8 +125,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "", - "connected": true + "0": "", + "1": true } ], "0/49/2": 10, @@ -147,14 +147,14 @@ "0/50/65531": [65528, 65529, 
65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "", - "IPv4Addresses": [""], - "IPv6Addresses": [], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "", + "5": [""], + "6": [], + "7": 1 } ], "0/51/1": 6, @@ -243,19 +243,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "", - "icac": "", - "fabricIndex": 1 + "1": "", + "2": "", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 1 + "1": "", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -278,20 +278,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, diff --git a/tests/components/matter/fixtures/nodes/onoff-light.json b/tests/components/matter/fixtures/nodes/onoff-light.json index 65ef0be5c8e..eed404ff85d 100644 --- a/tests/components/matter/fixtures/nodes/onoff-light.json +++ b/tests/components/matter/fixtures/nodes/onoff-light.json @@ -12,8 +12,8 @@ "0/4/65531": [0, 65528, 65529, 65531, 65532, 65533], "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -29,11 +29,11 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 1 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 } ], "0/31/1": [], @@ -65,8 +65,8 @@ "0/40/17": true, "0/40/18": "mock-onoff-light", "0/40/19": { 
- "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -111,8 +111,8 @@ "0/44/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -125,8 +125,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "", - "connected": true + "0": "", + "1": true } ], "0/49/2": 10, @@ -147,14 +147,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "", - "IPv4Addresses": [""], - "IPv6Addresses": [], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "", + "5": [""], + "6": [], + "7": 1 } ], "0/51/1": 6, @@ -243,19 +243,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "", - "icac": "", - "fabricIndex": 1 + "1": "", + "2": "", + "254": 1 } ], "0/62/1": [ { - "rootPublicKey": "", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 1 + "1": "", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 1 } ], "0/62/2": 5, @@ -278,20 +278,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -354,8 +354,8 @@ ], "1/29/0": [ { - "deviceType": 257, - "revision": 1 + "0": 257, + "1": 1 } ], "1/29/1": [3, 4, 6, 8, 29, 768, 1030], diff --git a/tests/components/matter/fixtures/nodes/pressure-sensor.json b/tests/components/matter/fixtures/nodes/pressure-sensor.json 
index a47cda28056..d38ac560ac5 100644 --- a/tests/components/matter/fixtures/nodes/pressure-sensor.json +++ b/tests/components/matter/fixtures/nodes/pressure-sensor.json @@ -6,8 +6,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-pressure-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -56,8 +56,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 773, - "revision": 1 + "0": 773, + "1": 1 } ], "1/29/1": [6, 29, 57, 768, 8, 40], diff --git a/tests/components/matter/fixtures/nodes/switch-unit.json b/tests/components/matter/fixtures/nodes/switch-unit.json new file mode 100644 index 00000000000..e16f1e406ec --- /dev/null +++ b/tests/components/matter/fixtures/nodes/switch-unit.json @@ -0,0 +1,119 @@ +{ + "node_id": 1, + "date_commissioned": "2022-11-29T21:23:48.485051", + "last_interview": "2022-11-29T21:23:48.485057", + "interview_version": 2, + "attributes": { + "0/29/0": [ + { + "0": 99999, + "1": 1 + } + ], + "0/29/1": [ + 4, 29, 31, 40, 42, 43, 44, 48, 49, 50, 51, 52, 53, 54, 55, 59, 60, 62, 63, + 64, 65 + ], + "0/29/2": [41], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 1, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 1, + "0/40/1": "Nabu Casa", + "0/40/2": 65521, + "0/40/3": "Mock SwitchUnit", + "0/40/4": 32768, + "0/40/5": "Mock SwitchUnit", + "0/40/6": "XX", + "0/40/7": 0, + "0/40/8": "v1.0", + "0/40/9": 1, + "0/40/10": "v1.0", + "0/40/11": "20221206", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/17": true, + "0/40/18": "mock-switch-unit", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 1, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 
5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 65528, 65529, 65531, 65532, 65533 + ], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 4, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/5/0": 0, + "1/5/1": 0, + "1/5/2": 0, + "1/5/3": false, + "1/5/4": 0, + "1/5/65532": 0, + "1/5/65533": 4, + "1/5/65528": [0, 1, 2, 3, 4, 6], + "1/5/65529": [0, 1, 2, 3, 4, 5, 6], + "1/5/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "1/6/0": false, + "1/6/16384": true, + "1/6/16385": 0, + "1/6/16386": 0, + "1/6/16387": null, + "1/6/65532": 1, + "1/6/65533": 4, + "1/6/65528": [], + "1/6/65529": [0, 1, 2, 64, 65, 66], + "1/6/65531": [ + 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 + ], + "1/7/0": 0, + "1/7/16": 0, + "1/7/65532": 0, + "1/7/65533": 1, + "1/7/65528": [], + "1/7/65529": [], + "1/7/65531": [0, 16, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 9999999, + "1": 1 + } + ], + "1/29/1": [ + 3, 4, 5, 6, 7, 8, 15, 29, 30, 37, 47, 59, 64, 65, 69, 80, 257, 258, 259, + 512, 513, 514, 516, 768, 1024, 1026, 1027, 1028, 1029, 1030, 1283, 1284, + 1285, 1286, 1287, 1288, 1289, 1290, 1291, 1292, 1293, 1294, 2820, + 4294048773 + ], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 1, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533] + }, + "available": true, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/temperature-sensor.json b/tests/components/matter/fixtures/nodes/temperature-sensor.json index c7d372ac2d7..0abb366f81b 100644 --- a/tests/components/matter/fixtures/nodes/temperature-sensor.json +++ b/tests/components/matter/fixtures/nodes/temperature-sensor.json @@ -6,8 +6,8 @@ 
"attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -41,8 +41,8 @@ "0/40/17": true, "0/40/18": "mock-temperature-sensor", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -61,8 +61,8 @@ "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 770, - "revision": 1 + "0": 770, + "1": 1 } ], "1/29/1": [6, 29, 57, 768, 8, 40], diff --git a/tests/components/matter/fixtures/nodes/thermostat.json b/tests/components/matter/fixtures/nodes/thermostat.json index 85ac42e5429..a7abff41331 100644 --- a/tests/components/matter/fixtures/nodes/thermostat.json +++ b/tests/components/matter/fixtures/nodes/thermostat.json @@ -8,8 +8,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 42, 48, 49, 50, 51, 54, 60, 62, 63, 64], @@ -22,18 +22,18 @@ "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 2 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 2 } ], "0/31/1": [], @@ -64,8 +64,8 @@ "0/40/17": true, "0/40/18": "3D06D025F9E026A0", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -86,8 +86,8 @@ "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -100,8 +100,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "TE9OR0FOLUlPVA==", - "connected": true + "0": "TE9OR0FOLUlPVA==", + "1": true } ], "0/49/2": 10, @@ -122,18 +122,18 @@ "0/50/65531": 
[65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "3FR1X7qs", - "IPv4Addresses": ["wKgI7g=="], - "IPv6Addresses": [ + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "3FR1X7qs", + "5": ["wKgI7g=="], + "6": [ "/oAAAAAAAADeVHX//l+6rA==", "JA4DsgZ9jUDeVHX//l+6rA==", "/UgvJAe/AADeVHX//l+6rA==" ], - "type": 1 + "7": 1 } ], "0/51/1": 4, @@ -182,32 +182,32 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "", - "icac": null, - "fabricIndex": 1 + "1": "", + "2": null, + "254": 1 }, { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBBgkBwEkCAEwCUEETaqdhs6MRkbh8fdh4EEImZaziiE6anaVp6Mu3P/zIJUB0fHUMxydKRTAC8bIn7vUhBCM47OYlYTkX0zFhoKYrzcKNQEoARgkAgE2AwQCBAEYMAQUrouBLuksQTkLrFhNVAbTHkNvMSEwBRTPlgMACvPdpqPOzuvR0OfPgfUcxBgwC0AcUInETXp/2gIFGDQF2+u+9WtYtvIfo6C3MhoOIV1SrRBZWYxY3CVjPGK7edTibQrVA4GccZKnHhNSBjxktrPiGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE+rI5XQyifTZbZRK1Z2DOuXdQkmdUkWklTv+G1x4ZfbSupbUDo4l7i/iFdyu//uJThAw1GPEkWe6i98IFKCOQpzcKNQEpARgkAmAwBBTPlgMACvPdpqPOzuvR0OfPgfUcxDAFFJQo6UEBWTLtZVYFZwRBgn+qstpTGDALQK3jYiaxwnYJMwTBQlcVNrGxPtuVTZrp5foZtQCp/JEX2ZWqVxKypilx0ES/CfMHZ0Lllv9QsLs8xV/HNLidllkY", - "fabricIndex": 2 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRBBgkBwEkCAEwCUEETaqdhs6MRkbh8fdh4EEImZaziiE6anaVp6Mu3P/zIJUB0fHUMxydKRTAC8bIn7vUhBCM47OYlYTkX0zFhoKYrzcKNQEoARgkAgE2AwQCBAEYMAQUrouBLuksQTkLrFhNVAbTHkNvMSEwBRTPlgMACvPdpqPOzuvR0OfPgfUcxBgwC0AcUInETXp/2gIFGDQF2+u+9WtYtvIfo6C3MhoOIV1SrRBZWYxY3CVjPGK7edTibQrVA4GccZKnHhNSBjxktrPiGA==", + "2": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEE+rI5XQyifTZbZRK1Z2DOuXdQkmdUkWklTv+G1x4ZfbSupbUDo4l7i/iFdyu//uJThAw1GPEkWe6i98IFKCOQpzcKNQEpARgkAmAwBBTPlgMACvPdpqPOzuvR0OfPgfUcxDAFFJQo6UEBWTLtZVYFZwRBgn+qstpTGDALQK3jYiaxwnYJMwTBQlcVNrGxPtuVTZrp5foZtQCp/JEX2ZWqVxKypilx0ES/CfMHZ0Lllv9QsLs8xV/HNLidllkY", + "254": 2 } ], "0/62/1": [ { - "rootPublicKey": "BAP9BJt5aQ9N98ClPTdNxpMZ1/Vh8r9usw6C8Ygi79AImsJq4UjAaYad0UI9Lh0OmRA9sWE2aSPbHjf409i/970=", - "vendorID": 4996, - "fabricID": 1, - "nodeID": 1425709672, - "label": "", - "fabricIndex": 1 + "1": "BAP9BJt5aQ9N98ClPTdNxpMZ1/Vh8r9usw6C8Ygi79AImsJq4UjAaYad0UI9Lh0OmRA9sWE2aSPbHjf409i/970=", + "2": 4996, + "3": 1, + "4": 1425709672, + "5": "", + "254": 1 }, { - "rootPublicKey": "BJXfyipMp+Jx4pkoTnvYoAYODis4xJktKdQXu8MSpBLIwII58BD0KkIG9NmuHcp0xUQKzqlfyB/bkAanevO73ZI=", - "vendorID": 65521, - "fabricID": 1, - "nodeID": 4, - "label": "", - "fabricIndex": 2 + "1": "BJXfyipMp+Jx4pkoTnvYoAYODis4xJktKdQXu8MSpBLIwII58BD0KkIG9NmuHcp0xUQKzqlfyB/bkAanevO73ZI=", + "2": 65521, + "3": 1, + "4": 4, + "5": "", + "254": 2 } ], "0/62/2": 5, @@ -233,20 +233,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -275,8 +275,8 @@ "1/6/65531": [0, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 769, - "revision": 1 + "0": 769, + "1": 1 } ], "1/29/1": [3, 4, 6, 29, 30, 64, 513, 514, 516], @@ -295,20 +295,20 @@ "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], "1/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - 
"value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "1/64/65532": 0, diff --git a/tests/components/matter/fixtures/nodes/window-covering_full.json b/tests/components/matter/fixtures/nodes/window-covering_full.json index feb75409526..fc6efe2077c 100644 --- a/tests/components/matter/fixtures/nodes/window-covering_full.json +++ b/tests/components/matter/fixtures/nodes/window-covering_full.json @@ -8,8 +8,8 @@ "0/29/65533": 1, "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63, 54], @@ -22,25 +22,25 @@ "0/31/65533": 1, "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 2 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 2 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 3 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 } ], "0/31/2": 4, @@ -71,8 +71,8 @@ "0/40/17": true, "0/40/18": "mock-full-window-covering", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65533": 1, "0/40/65528": [], @@ -84,8 +84,8 @@ "0/48/2": 0, "0/48/3": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/4": true, "0/48/65533": 1, @@ -96,8 +96,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "MTI2MDk5", - "connected": true + "0": "MTI2MDk5", + "1": true } ], "0/49/2": 10, @@ -113,14 +113,14 @@ "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "JG8olrDo", - "IPv4Addresses": 
["wKgBFw=="], - "IPv6Addresses": ["/oAAAAAAAAAmbyj//paw6A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "JG8olrDo", + "5": ["wKgBFw=="], + "6": ["/oAAAAAAAAAmbyj//paw6A=="], + "7": 1 } ], "0/51/1": 1, @@ -141,47 +141,47 @@ "0/62/65532": 0, "0/62/0": [ { - "noc": "", - "icac": null, - "fabricIndex": 1 + "1": "", + "2": null, + "254": 1 }, { - "noc": "", - "icac": null, - "fabricIndex": 2 + "1": "", + "2": null, + "254": 2 }, { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", - "fabricIndex": 3 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", + "254": 3 } ], "0/62/2": 5, "0/62/3": 3, "0/62/1": [ { - "rootPublicKey": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", - "vendorId": 24582, - 
"fabricId": 7331465149450221740, - "nodeId": 3429688654, - "label": "", - "fabricIndex": 1 + "1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", + "2": 24582, + "3": 7331465149450221740, + "4": 3429688654, + "5": "", + "254": 1 }, { - "rootPublicKey": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", - "vendorId": 4362, - "fabricId": 8516517930550670493, - "nodeId": 1443093566726981311, - "label": "", - "fabricIndex": 2 + "1": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", + "2": 4362, + "3": 8516517930550670493, + "4": 1443093566726981311, + "5": "", + "254": 2 }, { - "rootPublicKey": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", - "vendorId": 4939, - "fabricId": 2, - "nodeId": 50, - "label": "", - "fabricIndex": 3 + "1": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", + "2": 4939, + "3": 2, + "4": 50, + "5": "", + "254": 3 } ], "0/62/4": [ @@ -216,8 +216,8 @@ "1/29/65533": 1, "1/29/0": [ { - "deviceType": 514, - "revision": 2 + "0": 514, + "1": 2 } ], "1/29/1": [29, 3, 258], diff --git a/tests/components/matter/fixtures/nodes/window-covering_lift.json b/tests/components/matter/fixtures/nodes/window-covering_lift.json index afc2a2f734f..9c58869e988 100644 --- a/tests/components/matter/fixtures/nodes/window-covering_lift.json +++ b/tests/components/matter/fixtures/nodes/window-covering_lift.json @@ -8,8 +8,8 @@ "0/29/65533": 1, "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63, 54], @@ -22,25 +22,25 @@ "0/31/65533": 1, "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 2 + 
"1": 0, + "2": 0, + "3": null, + "4": null, + "254": 2 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 3 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 } ], "0/31/2": 4, @@ -71,8 +71,8 @@ "0/40/17": true, "0/40/18": "mock-lift-window-covering", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65533": 1, "0/40/65528": [], @@ -84,8 +84,8 @@ "0/48/2": 0, "0/48/3": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/4": true, "0/48/65533": 1, @@ -96,8 +96,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "MTI2MDk5", - "connected": true + "0": "MTI2MDk5", + "1": true } ], "0/49/2": 10, @@ -113,14 +113,14 @@ "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "JG8olrDo", - "IPv4Addresses": ["wKgBFw=="], - "IPv6Addresses": ["/oAAAAAAAAAmbyj//paw6A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "JG8olrDo", + "5": ["wKgBFw=="], + "6": ["/oAAAAAAAAAmbyj//paw6A=="], + "7": 1 } ], "0/51/1": 1, @@ -141,47 +141,47 @@ "0/62/65532": 0, "0/62/0": [ { - "noc": "", - "icac": null, - "fabricIndex": 1 + "1": "", + "2": null, + "254": 1 }, { - "noc": "", - "icac": null, - "fabricIndex": 2 + "1": "", + "2": null, + "254": 2 }, { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", - "icac": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", - "fabricIndex": 3 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", + "254": 3 } ], "0/62/2": 5, "0/62/3": 3, "0/62/1": [ { - "rootPublicKey": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", - "vendorId": 24582, - "fabricId": 7331465149450221740, - "nodeId": 3429688654, - "label": "", - "fabricIndex": 1 + "1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", + "2": 24582, + "3": 7331465149450221740, + "4": 3429688654, + "5": "", + "254": 1 }, { - "rootPublicKey": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", - "vendorId": 4362, - "fabricId": 8516517930550670493, - "nodeId": 1443093566726981311, - "label": "", - "fabricIndex": 2 + "1": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", + "2": 4362, + "3": 8516517930550670493, + "4": 1443093566726981311, + "5": "", + "254": 2 }, { - "rootPublicKey": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", - 
"vendorId": 4939, - "fabricId": 2, - "nodeId": 50, - "label": "", - "fabricIndex": 3 + "1": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", + "2": 4939, + "3": 2, + "4": 50, + "5": "", + "254": 3 } ], "0/62/4": [ @@ -216,8 +216,8 @@ "1/29/65533": 1, "1/29/0": [ { - "deviceType": 514, - "revision": 2 + "0": 514, + "1": 2 } ], "1/29/1": [29, 3, 258], diff --git a/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json b/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json index 8d3335bbd6c..fe970b6ed6b 100644 --- a/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json +++ b/tests/components/matter/fixtures/nodes/window-covering_pa-lift.json @@ -7,8 +7,8 @@ "attributes": { "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [ @@ -29,11 +29,11 @@ "0/30/65531": [0, 65528, 65529, 65531, 65532, 65533], "0/31/0": [ { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 2 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 2 } ], "0/31/1": [], @@ -65,8 +65,8 @@ "0/40/17": true, "0/40/18": "7630EF9998EDF03C", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65532": 0, "0/40/65533": 1, @@ -117,8 +117,8 @@ "0/45/65531": [0, 65528, 65529, 65531, 65532, 65533], "0/48/0": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/2": 0, "0/48/3": 0, @@ -131,8 +131,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "TE9OR0FOLUlPVA==", - "connected": true + "0": "TE9OR0FOLUlPVA==", + "1": true } ], "0/49/2": 10, @@ -153,17 +153,14 @@ "0/50/65531": [65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "hPcDB5/k", - "IPv4Addresses": ["wKgIhg=="], - 
"IPv6Addresses": [ - "/oAAAAAAAACG9wP//gef5A==", - "JA4DsgZ+bsCG9wP//gef5A==" - ], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "hPcDB5/k", + "5": ["wKgIhg=="], + "6": ["/oAAAAAAAACG9wP//gef5A==", "JA4DsgZ+bsCG9wP//gef5A=="], + "7": 1 } ], "0/51/1": 35, @@ -201,19 +198,19 @@ "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], "0/62/0": [ { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE5Rw88GvXEUXr+cPYgKd00rIWyiHM8eu4Bhrzf1v83yBI2Qa+pwfOsKyvzxiuHLMfzhdC3gre4najpimi8AsX+TcKNQEoARgkAgE2AwQCBAEYMAQUWh6NlHAMbG5gz+vqlF51fulr3z8wBRR+D1hE33RhFC/mJWrhhZs6SVStQBgwC0DD5IxVgOrftUA47K1bQHaCNuWqIxf/8oMfcI0nMvTtXApwbBAJI/LjjCwMZJVFBE3W/FC6dQWSEuF8ES745tLBGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEzpstYxy3lXF69g6H2vQ6uoqkdUsppJ4NcSyQcXQ8sQrF5HuzoVnDpevHfy0GAWHbXfE4VI0laTHvm/Wkj037ZjcKNQEpARgkAmAwBBR+D1hE33RhFC/mJWrhhZs6SVStQDAFFFCCK5NYv6CrD5/0S26zXBUwG0WBGDALQI5YKo3C3xvdqCrho2yZIJVJpJY2n9V/tmh7ESBBOHrY0b+K8Pf7hKhd5V0vzbCCbkhv1BNEne+lhcS2N6qhMNgY", - "fabricIndex": 2 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRARgkBwEkCAEwCUEE5Rw88GvXEUXr+cPYgKd00rIWyiHM8eu4Bhrzf1v83yBI2Qa+pwfOsKyvzxiuHLMfzhdC3gre4najpimi8AsX+TcKNQEoARgkAgE2AwQCBAEYMAQUWh6NlHAMbG5gz+vqlF51fulr3z8wBRR+D1hE33RhFC/mJWrhhZs6SVStQBgwC0DD5IxVgOrftUA47K1bQHaCNuWqIxf/8oMfcI0nMvTtXApwbBAJI/LjjCwMZJVFBE3W/FC6dQWSEuF8ES745tLBGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEzpstYxy3lXF69g6H2vQ6uoqkdUsppJ4NcSyQcXQ8sQrF5HuzoVnDpevHfy0GAWHbXfE4VI0laTHvm/Wkj037ZjcKNQEpARgkAmAwBBR+D1hE33RhFC/mJWrhhZs6SVStQDAFFFCCK5NYv6CrD5/0S26zXBUwG0WBGDALQI5YKo3C3xvdqCrho2yZIJVJpJY2n9V/tmh7ESBBOHrY0b+K8Pf7hKhd5V0vzbCCbkhv1BNEne+lhcS2N6qhMNgY", + "254": 2 } ], "0/62/1": [ { - "rootPublicKey": "BFLMrM1satBpU0DN4sri/S4AVo/ugmZCndBfPO33Q+ZCKDZzNhMOB014+hZs0KL7vPssavT7Tb9nt0W+kpeAe0U=", - "vendorId": 65521, - "fabricId": 1, - "nodeId": 1, - "label": "", - "fabricIndex": 2 + "1": 
"BFLMrM1satBpU0DN4sri/S4AVo/ugmZCndBfPO33Q+ZCKDZzNhMOB014+hZs0KL7vPssavT7Tb9nt0W+kpeAe0U=", + "2": 65521, + "3": 1, + "4": 1, + "5": "", + "254": 2 } ], "0/62/2": 5, @@ -239,20 +236,20 @@ "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], "0/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "0/64/65532": 0, @@ -281,8 +278,8 @@ "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], "1/29/0": [ { - "deviceType": 514, - "revision": 1 + "0": 514, + "1": 1 } ], "1/29/1": [3, 4, 29, 30, 64, 65, 258], @@ -301,20 +298,20 @@ "1/30/65531": [0, 65528, 65529, 65531, 65532, 65533], "1/64/0": [ { - "label": "room", - "value": "bedroom 2" + "0": "room", + "1": "bedroom 2" }, { - "label": "orientation", - "value": "North" + "0": "orientation", + "1": "North" }, { - "label": "floor", - "value": "2" + "0": "floor", + "1": "2" }, { - "label": "direction", - "value": "up" + "0": "direction", + "1": "up" } ], "1/64/65532": 0, diff --git a/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json b/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json index 44347dbd964..92a1d820d2e 100644 --- a/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json +++ b/tests/components/matter/fixtures/nodes/window-covering_pa-tilt.json @@ -8,8 +8,8 @@ "0/29/65533": 1, "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63, 54], @@ -22,25 +22,25 @@ "0/31/65533": 1, "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 2 + 
"1": 0, + "2": 0, + "3": null, + "4": null, + "254": 2 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 3 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 } ], "0/31/2": 4, @@ -71,8 +71,8 @@ "0/40/17": true, "0/40/18": "mock_pa_tilt_window_covering", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65533": 1, "0/40/65528": [], @@ -84,8 +84,8 @@ "0/48/2": 0, "0/48/3": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/4": true, "0/48/65533": 1, @@ -96,8 +96,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "MTI2MDk5", - "connected": true + "0": "MTI2MDk5", + "1": true } ], "0/49/2": 10, @@ -113,14 +113,14 @@ "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "JG8olrDo", - "IPv4Addresses": ["wKgBFw=="], - "IPv6Addresses": ["/oAAAAAAAAAmbyj//paw6A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "JG8olrDo", + "5": ["wKgBFw=="], + "6": ["/oAAAAAAAAAmbyj//paw6A=="], + "7": 1 } ], "0/51/1": 1, @@ -141,47 +141,47 @@ "0/62/65532": 0, "0/62/0": [ { - "noc": "", - "icac": null, - "fabricIndex": 1 + "1": "", + "2": null, + "254": 1 }, { - "noc": "", - "icac": null, - "fabricIndex": 2 + "1": "", + "2": null, + "254": 2 }, { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", - "icac": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", - "fabricIndex": 3 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", + "254": 3 } ], "0/62/2": 5, "0/62/3": 3, "0/62/1": [ { - "rootPublicKey": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", - "vendorId": 24582, - "fabricId": 7331465149450221740, - "nodeId": 3429688654, - "label": "", - "fabricIndex": 1 + "1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", + "2": 24582, + "3": 7331465149450221740, + "4": 3429688654, + "5": "", + "254": 1 }, { - "rootPublicKey": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", - "vendorId": 4362, - "fabricId": 8516517930550670493, - "nodeId": 1443093566726981311, - "label": "", - "fabricIndex": 2 + "1": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", + "2": 4362, + "3": 8516517930550670493, + "4": 1443093566726981311, + "5": "", + "254": 2 }, { - "rootPublicKey": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", - 
"vendorId": 4939, - "fabricId": 2, - "nodeId": 50, - "label": "", - "fabricIndex": 3 + "1": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", + "2": 4939, + "3": 2, + "4": 50, + "5": "", + "254": 3 } ], "0/62/4": [ @@ -216,8 +216,8 @@ "1/29/65533": 1, "1/29/0": [ { - "deviceType": 514, - "revision": 2 + "0": 514, + "1": 2 } ], "1/29/1": [29, 3, 258], diff --git a/tests/components/matter/fixtures/nodes/window-covering_tilt.json b/tests/components/matter/fixtures/nodes/window-covering_tilt.json index a33e0f24c3f..144348b5c76 100644 --- a/tests/components/matter/fixtures/nodes/window-covering_tilt.json +++ b/tests/components/matter/fixtures/nodes/window-covering_tilt.json @@ -8,8 +8,8 @@ "0/29/65533": 1, "0/29/0": [ { - "deviceType": 22, - "revision": 1 + "0": 22, + "1": 1 } ], "0/29/1": [29, 31, 40, 48, 49, 51, 60, 62, 63, 54], @@ -22,25 +22,25 @@ "0/31/65533": 1, "0/31/0": [ { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 1 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 1 }, { - "privilege": 0, - "authMode": 0, - "subjects": null, - "targets": null, - "fabricIndex": 2 + "1": 0, + "2": 0, + "3": null, + "4": null, + "254": 2 }, { - "privilege": 5, - "authMode": 2, - "subjects": [112233], - "targets": null, - "fabricIndex": 3 + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 3 } ], "0/31/2": 4, @@ -71,8 +71,8 @@ "0/40/17": true, "0/40/18": "mock-tilt-window-covering", "0/40/19": { - "caseSessionsPerFabric": 3, - "subscriptionsPerFabric": 3 + "0": 3, + "1": 3 }, "0/40/65533": 1, "0/40/65528": [], @@ -84,8 +84,8 @@ "0/48/2": 0, "0/48/3": 0, "0/48/1": { - "failSafeExpiryLengthSeconds": 60, - "maxCumulativeFailsafeSeconds": 900 + "0": 60, + "1": 900 }, "0/48/4": true, "0/48/65533": 1, @@ -96,8 +96,8 @@ "0/49/0": 1, "0/49/1": [ { - "networkID": "MTI2MDk5", - "connected": true + "0": "MTI2MDk5", + "1": true } ], "0/49/2": 10, @@ -113,14 +113,14 @@ "0/49/65531": [0, 1, 2, 
3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], "0/51/0": [ { - "name": "WIFI_STA_DEF", - "isOperational": true, - "offPremiseServicesReachableIPv4": null, - "offPremiseServicesReachableIPv6": null, - "hardwareAddress": "JG8olrDo", - "IPv4Addresses": ["wKgBFw=="], - "IPv6Addresses": ["/oAAAAAAAAAmbyj//paw6A=="], - "type": 1 + "0": "WIFI_STA_DEF", + "1": true, + "2": null, + "3": null, + "4": "JG8olrDo", + "5": ["wKgBFw=="], + "6": ["/oAAAAAAAAAmbyj//paw6A=="], + "7": 1 } ], "0/51/1": 1, @@ -141,47 +141,47 @@ "0/62/65532": 0, "0/62/0": [ { - "noc": "", - "icac": null, - "fabricIndex": 1 + "1": "", + "2": null, + "254": 1 }, { - "noc": "", - "icac": null, - "fabricIndex": 2 + "1": "", + "2": null, + "254": 2 }, { - "noc": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", - "icac": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", - "fabricIndex": 3 + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRMhgkBwEkCAEwCUEE+5TLtucQZ8l7Y5r8nKhYB0mia0RMn+RJa5AtRIPb2R9ixMcQXfQBANdHPCwsfTGWyjBYzPXG1yDUTUz+Z1J9aTcKNQEoARgkAgE2AwQCBAEYMAQUh/lTccn18xJ1JqA9VRHdr2+IhscwBRTPeGj+EyBBTsdlJC4zNSP/tIcpFhgwC0AoRjZKvJRkg+Cz77N6+IIQBt0i1Oco92N/XzoDWtgUVIOW5qvPcUUI/tiYAEDdefy2/6XpjU1Y7ecN3vgoTdNUGA==", + "2": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEL6dfjjyZxKHsFjZvYUOhWsOCI/2ucOxcCZGFaJwG0vXhL5/aDhR/AF907lF93LR1Huvp3NJsB0oxqsNnbEz8jjcKNQEpARgkAmAwBBTPeGj+EyBBTsdlJC4zNSP/tIcpFjAFFC8Br9IClyBL3e7po3G+QXNGsBoYGDALQIHEwwdIaYHnFzpYngW9g+7Cn3gl0qKnetK5gWUVVTdVtpx6dYBblvPnOU+5K3Ow85llzcRxU1yXgPAM77s7t8gY", + "254": 3 } ], "0/62/2": 5, "0/62/3": 3, "0/62/1": [ { - "rootPublicKey": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", - "vendorId": 24582, - "fabricId": 7331465149450221740, - "nodeId": 3429688654, - "label": "", - "fabricIndex": 1 + "1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", + "2": 24582, + "3": 7331465149450221740, + "4": 3429688654, + "5": "", + "254": 1 }, { - "rootPublicKey": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", - "vendorId": 4362, - "fabricId": 8516517930550670493, - "nodeId": 1443093566726981311, - "label": "", - "fabricIndex": 2 + "1": "BJyJ1DODbJ+HellxuG3J/EstNpyw/i5h1x5qjNLQjwnPZoEaLLMZ8KKN7/rxQy3JUIkfuQydJz7JXeF80mES8q8=", + "2": 4362, + "3": 8516517930550670493, + "4": 1443093566726981311, + "5": "", + "254": 2 }, { - "rootPublicKey": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", - "vendorId": 4939, - "fabricId": 2, - "nodeId": 50, - "label": "", - "fabricIndex": 3 + "1": "BFOpRqEk+HJ6n/NtUtaWTQVVwstz9QRDK2xvRP6qKZKX3Rk05Zie5Ux9PdjgE1K5zE9NIP2jHHcVJjRBVZxNFz0=", + "2": 4939, + "3": 2, + "4": 50, + "5": "", + "254": 3 } ], "0/62/4": [ @@ -216,8 +216,8 @@ "1/29/65533": 1, "1/29/0": [ { - "deviceType": 514, - "revision": 2 + "0": 514, + "1": 2 } ], "1/29/1": [29, 3, 258], diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index 4dbb3b27b9c..e231012f90d 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -90,6 +90,7 @@ async def 
test_occupancy_sensor( @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_battery_sensor( hass: HomeAssistant, + entity_registry: er.EntityRegistry, matter_client: MagicMock, door_lock: MatterNode, ) -> None: @@ -108,7 +109,6 @@ async def test_battery_sensor( assert state assert state.state == "on" - entity_registry = er.async_get(hass) entry = entity_registry.async_get(entity_id) assert entry diff --git a/tests/components/matter/test_diagnostics.py b/tests/components/matter/test_diagnostics.py index 303e9879c56..c14eb93f24c 100644 --- a/tests/components/matter/test_diagnostics.py +++ b/tests/components/matter/test_diagnostics.py @@ -81,6 +81,7 @@ async def test_config_entry_diagnostics( async def test_device_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, matter_client: MagicMock, config_entry_diagnostics: dict[str, Any], device_diagnostics: dict[str, Any], @@ -102,8 +103,9 @@ async def test_device_diagnostics( ) matter_client.get_diagnostics.return_value = server_diagnostics config_entry = hass.config_entries.async_entries(DOMAIN)[0] - dev_reg = dr.async_get(hass) - device = dr.async_entries_for_config_entry(dev_reg, config_entry.entry_id)[0] + device = dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)[ + 0 + ] assert device diagnostics = await get_diagnostics_for_device( diff --git a/tests/components/matter/test_helpers.py b/tests/components/matter/test_helpers.py index f7399d6aaf1..61988a37122 100644 --- a/tests/components/matter/test_helpers.py +++ b/tests/components/matter/test_helpers.py @@ -37,10 +37,10 @@ async def test_get_device_id( @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_node_from_device_entry( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, matter_client: MagicMock, ) -> None: """Test get_node_from_device_entry.""" - device_registry = dr.async_get(hass) other_domain = "other_domain" 
other_config_entry = MockConfigEntry(domain=other_domain) other_config_entry.add_to_hass(hass) @@ -60,16 +60,13 @@ async def test_get_node_from_device_entry( assert node_from_device_entry is node - with pytest.raises(ValueError) as value_error: - await get_node_from_device_entry(hass, other_device_entry) - - assert f"Device {other_device_entry.id} is not a Matter device" in str( - value_error.value - ) + # test non-Matter device returns None + assert get_node_from_device_entry(hass, other_device_entry) is None matter_client.server_info = None + # test non-initialized server raises RuntimeError with pytest.raises(RuntimeError) as runtime_error: - node_from_device_entry = await get_node_from_device_entry(hass, device_entry) + node_from_device_entry = get_node_from_device_entry(hass, device_entry) assert "Matter server information is not available" in str(runtime_error.value) diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index bbe77b76af5..2286249bd5d 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -612,6 +612,8 @@ async def test_remove_entry( @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, matter_client: MagicMock, hass_ws_client: WebSocketGenerator, ) -> None: @@ -621,11 +623,9 @@ async def test_remove_config_entry_device( await hass.async_block_till_done() config_entry = hass.config_entries.async_entries(DOMAIN)[0] - device_registry = dr.async_get(hass) device_entry = dr.async_entries_for_config_entry( device_registry, config_entry.entry_id )[0] - entity_registry = er.async_get(hass) entity_id = "light.m5stamp_lighting_app" assert device_entry @@ -654,6 +654,7 @@ async def test_remove_config_entry_device( @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_remove_config_entry_device_no_node( hass: 
HomeAssistant, + device_registry: dr.DeviceRegistry, matter_client: MagicMock, integration: MockConfigEntry, hass_ws_client: WebSocketGenerator, @@ -661,7 +662,6 @@ async def test_remove_config_entry_device_no_node( """Test that a device can be removed ok without an existing node.""" assert await async_setup_component(hass, "config", {}) config_entry = integration - device_registry = dr.async_get(hass) device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={ diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 2650f2b1a6f..0d8f892f992 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -187,6 +187,7 @@ async def test_temperature_sensor( @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_battery_sensor( hass: HomeAssistant, + entity_registry: er.EntityRegistry, matter_client: MagicMock, eve_contact_sensor_node: MatterNode, ) -> None: @@ -203,7 +204,6 @@ async def test_battery_sensor( assert state assert state.state == "50" - entity_registry = er.async_get(hass) entry = entity_registry.async_get(entity_id) assert entry diff --git a/tests/components/matter/test_switch.py b/tests/components/matter/test_switch.py index 6fbe5d58f28..ac03d731ee1 100644 --- a/tests/components/matter/test_switch.py +++ b/tests/components/matter/test_switch.py @@ -14,22 +14,30 @@ from .common import ( ) -@pytest.fixture(name="switch_node") -async def switch_node_fixture( +@pytest.fixture(name="powerplug_node") +async def powerplug_node_fixture( hass: HomeAssistant, matter_client: MagicMock ) -> MatterNode: - """Fixture for a switch node.""" + """Fixture for a Powerplug node.""" return await setup_integration_with_node_fixture( hass, "on-off-plugin-unit", matter_client ) +@pytest.fixture(name="switch_unit") +async def switch_unit_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Switch 
Unit node.""" + return await setup_integration_with_node_fixture(hass, "switch-unit", matter_client) + + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_turn_on( hass: HomeAssistant, matter_client: MagicMock, - switch_node: MatterNode, + powerplug_node: MatterNode, ) -> None: """Test turning on a switch.""" state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch") @@ -47,12 +55,12 @@ async def test_turn_on( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=switch_node.node_id, + node_id=powerplug_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.On(), ) - set_node_attribute(switch_node, 1, 6, 0, True) + set_node_attribute(powerplug_node, 1, 6, 0, True) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch") @@ -65,7 +73,7 @@ async def test_turn_on( async def test_turn_off( hass: HomeAssistant, matter_client: MagicMock, - switch_node: MatterNode, + powerplug_node: MatterNode, ) -> None: """Test turning off a switch.""" state = hass.states.get("switch.mock_onoffpluginunit_powerplug_switch") @@ -83,7 +91,24 @@ async def test_turn_off( assert matter_client.send_device_command.call_count == 1 assert matter_client.send_device_command.call_args == call( - node_id=switch_node.node_id, + node_id=powerplug_node.node_id, endpoint_id=1, command=clusters.OnOff.Commands.Off(), ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_switch_unit( + hass: HomeAssistant, + matter_client: MagicMock, + switch_unit: MatterNode, +) -> None: + """Test if a switch entity is discovered from any (non-light) OnOf cluster device.""" + # A switch entity should be discovered as fallback for ANY Matter device (endpoint) + # that has the OnOff cluster 
and does not fall into an explicit discovery schema + # by another platform (e.g. light, lock etc.). + state = hass.states.get("switch.mock_switchunit") + assert state + assert state.state == "off" + assert state.attributes["friendly_name"] == "Mock SwitchUnit" diff --git a/tests/components/maxcube/test_maxcube_binary_sensor.py b/tests/components/maxcube/test_maxcube_binary_sensor.py index 65991f91b7b..0c73c548211 100644 --- a/tests/components/maxcube/test_maxcube_binary_sensor.py +++ b/tests/components/maxcube/test_maxcube_binary_sensor.py @@ -23,10 +23,12 @@ BATTERY_ENTITY_ID = f"{ENTITY_ID}_battery" async def test_window_shuttler( - hass: HomeAssistant, cube: MaxCube, windowshutter: MaxWindowShutter + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + cube: MaxCube, + windowshutter: MaxWindowShutter, ) -> None: """Test a successful setup with a shuttler device.""" - entity_registry = er.async_get(hass) assert entity_registry.async_is_registered(ENTITY_ID) entity = entity_registry.async_get(ENTITY_ID) assert entity.unique_id == "AABBCCDD03" @@ -47,10 +49,12 @@ async def test_window_shuttler( async def test_window_shuttler_battery( - hass: HomeAssistant, cube: MaxCube, windowshutter: MaxWindowShutter + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + cube: MaxCube, + windowshutter: MaxWindowShutter, ) -> None: """Test battery binary_state with a shuttler device.""" - entity_registry = er.async_get(hass) assert entity_registry.async_is_registered(BATTERY_ENTITY_ID) entity = entity_registry.async_get(BATTERY_ENTITY_ID) assert entity.unique_id == "AABBCCDD03_battery" diff --git a/tests/components/maxcube/test_maxcube_climate.py b/tests/components/maxcube/test_maxcube_climate.py index 3682c98e947..f279f049ac3 100644 --- a/tests/components/maxcube/test_maxcube_climate.py +++ b/tests/components/maxcube/test_maxcube_climate.py @@ -60,9 +60,10 @@ WALL_ENTITY_ID = "climate.testroom_testwallthermostat" VALVE_POSITION = "valve_position" -async def 
test_setup_thermostat(hass: HomeAssistant, cube: MaxCube) -> None: +async def test_setup_thermostat( + hass: HomeAssistant, entity_registry: er.EntityRegistry, cube: MaxCube +) -> None: """Test a successful setup of a thermostat device.""" - entity_registry = er.async_get(hass) assert entity_registry.async_is_registered(ENTITY_ID) entity = entity_registry.async_get(ENTITY_ID) assert entity.unique_id == "AABBCCDD01" @@ -96,9 +97,10 @@ async def test_setup_thermostat(hass: HomeAssistant, cube: MaxCube) -> None: assert state.attributes.get(VALVE_POSITION) == 25 -async def test_setup_wallthermostat(hass: HomeAssistant, cube: MaxCube) -> None: +async def test_setup_wallthermostat( + hass: HomeAssistant, entity_registry: er.EntityRegistry, cube: MaxCube +) -> None: """Test a successful setup of a wall thermostat device.""" - entity_registry = er.async_get(hass) assert entity_registry.async_is_registered(WALL_ENTITY_ID) entity = entity_registry.async_get(WALL_ENTITY_ID) assert entity.unique_id == "AABBCCDD02" diff --git a/tests/components/met/test_init.py b/tests/components/met/test_init.py index 652763947df..0e4e46b09da 100644 --- a/tests/components/met/test_init.py +++ b/tests/components/met/test_init.py @@ -52,13 +52,15 @@ async def test_fail_default_home_entry( async def test_removing_incorrect_devices( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_weather + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, + mock_weather, ) -> None: """Test we remove incorrect devices.""" entry = await init_integration(hass) - device_reg = dr.async_get(hass) - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=entry.entry_id, name="Forecast_legacy", entry_type=dr.DeviceEntryType.SERVICE, @@ -71,6 +73,6 @@ async def test_removing_incorrect_devices( assert await hass.config_entries.async_reload(entry.entry_id) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - assert not 
device_reg.async_get_device(identifiers={(DOMAIN,)}) - assert device_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)}) + assert not device_registry.async_get_device(identifiers={(DOMAIN,)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, entry.entry_id)}) assert "Removing improper device Forecast_legacy" in caplog.text diff --git a/tests/components/met/test_weather.py b/tests/components/met/test_weather.py index 5a28b8eceb0..432c288383a 100644 --- a/tests/components/met/test_weather.py +++ b/tests/components/met/test_weather.py @@ -6,21 +6,23 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -async def test_new_config_entry(hass: HomeAssistant, mock_weather) -> None: +async def test_new_config_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather +) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) await hass.config_entries.flow.async_init("met", context={"source": "onboarding"}) await hass.async_block_till_done() assert len(hass.states.async_entity_ids("weather")) == 1 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 1 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 -async def test_legacy_config_entry(hass: HomeAssistant, mock_weather) -> None: +async def test_legacy_config_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather +) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "home-hourly", @@ -30,7 +32,7 @@ async def test_legacy_config_entry(hass: HomeAssistant, mock_weather) -> None: assert len(hass.states.async_entity_ids("weather")) == 2 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, 
entry.entry_id)) == 2 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 2 async def test_tracking_home(hass: HomeAssistant, mock_weather) -> None: diff --git a/tests/components/met_eireann/snapshots/test_weather.ambr b/tests/components/met_eireann/snapshots/test_weather.ambr index 81d7a52aa06..90f36d09d25 100644 --- a/tests/components/met_eireann/snapshots/test_weather.ambr +++ b/tests/components/met_eireann/snapshots/test_weather.ambr @@ -31,6 +31,110 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 
'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].1 + dict({ + 'weather.somewhere': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-08T12:00:00+00:00', + 'temperature': 10.0, + }), + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2023-08-09T12:00:00+00:00', + 'temperature': 20.0, + }), + ]), + }), + }) +# --- # name: test_forecast_subscription[daily] list([ dict({ diff --git a/tests/components/met_eireann/test_weather.py b/tests/components/met_eireann/test_weather.py index a3ca1fd55f7..e5c2c66b626 100644 --- a/tests/components/met_eireann/test_weather.py +++ b/tests/components/met_eireann/test_weather.py @@ -9,7 +9,8 @@ from homeassistant.components.met_eireann import UPDATE_INTERVAL from homeassistant.components.met_eireann.const import DOMAIN from homeassistant.components.weather import ( DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -32,20 +33,22 @@ async def setup_config_entry(hass: HomeAssistant) -> ConfigEntry: return mock_data -async def test_new_config_entry(hass: HomeAssistant, mock_weather) -> None: +async def test_new_config_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather +) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) await setup_config_entry(hass) assert len(hass.states.async_entity_ids("weather")) == 1 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 1 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 -async def 
test_legacy_config_entry(hass: HomeAssistant, mock_weather) -> None: +async def test_legacy_config_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather +) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "10-20-hourly", @@ -54,7 +57,7 @@ async def test_legacy_config_entry(hass: HomeAssistant, mock_weather) -> None: assert len(hass.states.async_entity_ids("weather")) == 2 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 2 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 2 async def test_weather(hass: HomeAssistant, mock_weather) -> None: @@ -75,10 +78,18 @@ async def test_weather(hass: HomeAssistant, mock_weather) -> None: assert len(hass.states.async_entity_ids("weather")) == 0 +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, mock_weather, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test multiple forecast.""" mock_weather.get_forecast.return_value = [ @@ -100,7 +111,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": entity_id, "type": "daily", @@ -112,7 +123,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": entity_id, "type": "hourly", diff --git a/tests/components/metoffice/snapshots/test_weather.ambr b/tests/components/metoffice/snapshots/test_weather.ambr index 38df9f04ab2..108a9330403 100644 --- a/tests/components/metoffice/snapshots/test_weather.ambr +++ b/tests/components/metoffice/snapshots/test_weather.ambr @@ -647,6 +647,1988 @@ ]), }) # --- +# name: 
test_forecast_service[forecast] + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 
'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 
'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': 
'2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].2 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 
'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].3 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 
'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 
'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 
'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].4 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': 
'2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': 
'2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 
'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 
'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].3 + dict({ + 'forecast': list([ + 
dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, 
+ 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 
'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', 
+ 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].4 + dict({ + 'forecast': list([ + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].1 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': 
'2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', 
+ 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 
'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 
'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].2 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 13.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 14, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].3 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-25T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 19.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T18:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 17.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-25T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 14.0, + 'wind_bearing': 'NW', + 'wind_speed': 3.22, + }), + dict({ 
+ 'condition': 'partlycloudy', + 'datetime': '2020-04-26T00:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 13.0, + 'wind_bearing': 'WSW', + 'wind_speed': 3.22, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-26T03:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T09:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T12:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 12.0, + 'wind_bearing': 'WNW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T15:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 12.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T18:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 11.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-26T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T00:00:00+00:00', + 'precipitation_probability': 11, + 'temperature': 9.0, + 'wind_bearing': 'WNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T03:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 8.0, + 'wind_bearing': 'WNW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-27T06:00:00+00:00', + 'precipitation_probability': 14, + 
'temperature': 8.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-27T12:00:00+00:00', + 'precipitation_probability': 4, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T15:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-27T18:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 10.0, + 'wind_bearing': 'NW', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-27T21:00:00+00:00', + 'precipitation_probability': 1, + 'temperature': 9.0, + 'wind_bearing': 'NW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T00:00:00+00:00', + 'precipitation_probability': 2, + 'temperature': 8.0, + 'wind_bearing': 'NNW', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2020-04-28T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 7.0, + 'wind_bearing': 'W', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2020-04-28T06:00:00+00:00', + 'precipitation_probability': 5, + 'temperature': 6.0, + 'wind_bearing': 'S', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-28T09:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T12:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'ENE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 
'datetime': '2020-04-28T15:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 12.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T18:00:00+00:00', + 'precipitation_probability': 10, + 'temperature': 11.0, + 'wind_bearing': 'N', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-28T21:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 10.0, + 'wind_bearing': 'NNE', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T00:00:00+00:00', + 'precipitation_probability': 6, + 'temperature': 9.0, + 'wind_bearing': 'E', + 'wind_speed': 6.44, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2020-04-29T03:00:00+00:00', + 'precipitation_probability': 3, + 'temperature': 8.0, + 'wind_bearing': 'SSE', + 'wind_speed': 11.27, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T06:00:00+00:00', + 'precipitation_probability': 9, + 'temperature': 8.0, + 'wind_bearing': 'SE', + 'wind_speed': 14.48, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T09:00:00+00:00', + 'precipitation_probability': 12, + 'temperature': 10.0, + 'wind_bearing': 'SE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T12:00:00+00:00', + 'precipitation_probability': 47, + 'temperature': 12.0, + 'wind_bearing': 'SE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'pouring', + 'datetime': '2020-04-29T15:00:00+00:00', + 'precipitation_probability': 59, + 'temperature': 13.0, + 'wind_bearing': 'SSE', + 'wind_speed': 20.92, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2020-04-29T18:00:00+00:00', + 'precipitation_probability': 39, + 'temperature': 12.0, + 'wind_bearing': 'SSE', + 'wind_speed': 17.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2020-04-29T21:00:00+00:00', + 'precipitation_probability': 19, + 'temperature': 11.0, + 'wind_bearing': 'SSE', 
+ 'wind_speed': 20.92, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].4 + dict({ + 'weather.met_office_wavertree_daily': dict({ + 'forecast': list([ + ]), + }), + }) +# --- # name: test_forecast_subscription[weather.met_office_wavertree_3_hourly] list([ dict({ diff --git a/tests/components/metoffice/test_init.py b/tests/components/metoffice/test_init.py index a9e286907d5..10ed0a83f0c 100644 --- a/tests/components/metoffice/test_init.py +++ b/tests/components/metoffice/test_init.py @@ -89,6 +89,7 @@ from tests.common import MockConfigEntry ) async def test_migrate_unique_id( hass: HomeAssistant, + entity_registry: er.EntityRegistry, old_unique_id: str, new_unique_id: str, migration_needed: bool, @@ -102,9 +103,7 @@ async def test_migrate_unique_id( ) entry.add_to_hass(hass) - ent_reg = er.async_get(hass) - - entity: er.RegistryEntry = ent_reg.async_get_or_create( + entity: er.RegistryEntry = entity_registry.async_get_or_create( suggested_object_id="my_sensor", disabled_by=None, domain=SENSOR_DOMAIN, @@ -118,9 +117,12 @@ async def test_migrate_unique_id( await hass.async_block_till_done() if migration_needed: - assert ent_reg.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) is None + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) assert ( - ent_reg.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, new_unique_id) + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, new_unique_id) == "sensor.my_sensor" ) diff --git a/tests/components/metoffice/test_weather.py b/tests/components/metoffice/test_weather.py index 8930d318ec7..19c27873d5e 100644 --- a/tests/components/metoffice/test_weather.py +++ b/tests/components/metoffice/test_weather.py @@ -13,7 +13,8 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.metoffice.const import DEFAULT_SCAN_INTERVAL, DOMAIN from homeassistant.components.weather import ( DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + 
LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -100,13 +101,15 @@ async def test_site_cannot_connect( @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) async def test_site_cannot_update( - hass: HomeAssistant, requests_mock: requests_mock.Mocker, wavertree_data + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + requests_mock: requests_mock.Mocker, + wavertree_data, ) -> None: """Test we handle cannot connect error.""" - registry = er.async_get(hass) # Pre-create the hourly entity - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "53.38374_-2.90929", @@ -143,13 +146,15 @@ async def test_site_cannot_update( @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) async def test_one_weather_site_running( - hass: HomeAssistant, requests_mock: requests_mock.Mocker, wavertree_data + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + requests_mock: requests_mock.Mocker, + wavertree_data, ) -> None: """Test the Met Office weather platform.""" - registry = er.async_get(hass) # Pre-create the hourly entity - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "53.38374_-2.90929", @@ -219,19 +224,21 @@ async def test_one_weather_site_running( @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) async def test_two_weather_sites_running( - hass: HomeAssistant, requests_mock: requests_mock.Mocker, wavertree_data + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + requests_mock: requests_mock.Mocker, + wavertree_data, ) -> None: """Test we handle two different weather sites both running.""" - registry = er.async_get(hass) # Pre-create the hourly entities - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "53.38374_-2.90929", 
suggested_object_id="met_office_wavertree_3_hourly", ) - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "52.75556_0.44231", @@ -369,9 +376,10 @@ async def test_two_weather_sites_running( @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) -async def test_new_config_entry(hass: HomeAssistant, no_sensor, wavertree_data) -> None: +async def test_new_config_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data +) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) entry = MockConfigEntry( domain=DOMAIN, @@ -383,17 +391,16 @@ async def test_new_config_entry(hass: HomeAssistant, no_sensor, wavertree_data) assert len(hass.states.async_entity_ids(WEATHER_DOMAIN)) == 1 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 1 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) async def test_legacy_config_entry( - hass: HomeAssistant, no_sensor, wavertree_data + hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data ) -> None: """Test the expected entities are created.""" - registry = er.async_get(hass) # Pre-create the hourly entity - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "53.38374_-2.90929", @@ -411,10 +418,17 @@ async def test_legacy_config_entry( assert len(hass.states.async_entity_ids("weather")) == 2 entry = hass.config_entries.async_entries()[0] - assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 2 + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 2 @pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC)) +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + 
LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, freezer: FrozenDateTimeFactory, @@ -422,6 +436,7 @@ async def test_forecast_service( snapshot: SnapshotAssertion, no_sensor, wavertree_data: dict[str, _Matcher], + service: str, ) -> None: """Test multiple forecast.""" entry = MockConfigEntry( @@ -438,7 +453,7 @@ async def test_forecast_service( for forecast_type in ("daily", "hourly"): response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.met_office_wavertree_daily", "type": forecast_type, @@ -446,7 +461,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot # Calling the services should use cached data @@ -464,7 +478,7 @@ async def test_forecast_service( for forecast_type in ("daily", "hourly"): response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.met_office_wavertree_daily", "type": forecast_type, @@ -472,7 +486,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot # Calling the services should update the hourly forecast @@ -488,7 +501,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.met_office_wavertree_daily", "type": "hourly", @@ -496,7 +509,7 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] == [] + assert response == snapshot @pytest.mark.parametrize( @@ -510,6 +523,7 @@ async def test_forecast_service( async def test_forecast_subscription( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, no_sensor, @@ -519,9 +533,8 @@ async def test_forecast_subscription( 
"""Test multiple forecast.""" client = await hass_ws_client(hass) - registry = er.async_get(hass) # Pre-create the hourly entity - registry.async_get_or_create( + entity_registry.async_get_or_create( WEATHER_DOMAIN, DOMAIN, "53.38374_-2.90929", diff --git a/tests/components/mikrotik/test_device_tracker.py b/tests/components/mikrotik/test_device_tracker.py index 84fcfabffee..55cebaec525 100644 --- a/tests/components/mikrotik/test_device_tracker.py +++ b/tests/components/mikrotik/test_device_tracker.py @@ -208,29 +208,30 @@ async def test_hub_wifiwave2(hass: HomeAssistant, mock_device_registry_devices) assert device_4.attributes["host_name"] == "Device_4" -async def test_restoring_devices(hass: HomeAssistant) -> None: +async def test_restoring_devices( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test restoring existing device_tracker entities if not detected on startup.""" config_entry = MockConfigEntry( domain=mikrotik.DOMAIN, data=MOCK_DATA, options=MOCK_OPTIONS ) config_entry.add_to_hass(hass) - registry = er.async_get(hass) - registry.async_get_or_create( + entity_registry.async_get_or_create( device_tracker.DOMAIN, mikrotik.DOMAIN, "00:00:00:00:00:01", suggested_object_id="device_1", config_entry=config_entry, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( device_tracker.DOMAIN, mikrotik.DOMAIN, "00:00:00:00:00:02", suggested_object_id="device_2", config_entry=config_entry, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( device_tracker.DOMAIN, mikrotik.DOMAIN, "00:00:00:00:00:03", diff --git a/tests/components/mill/test_init.py b/tests/components/mill/test_init.py index 694e9537a8c..15175dedada 100644 --- a/tests/components/mill/test_init.py +++ b/tests/components/mill/test_init.py @@ -115,7 +115,8 @@ async def test_unload_entry(hass: HomeAssistant) -> None: ) as unload_entry, patch( "mill.Mill.fetch_heater_and_sensor_data", return_value={} ), patch( - "mill.Mill.connect", 
return_value=True + "mill.Mill.connect", + return_value=True, ): assert await async_setup_component(hass, "mill", {}) diff --git a/tests/components/min_max/test_init.py b/tests/components/min_max/test_init.py index 8d8eac5c700..cd07f7060f6 100644 --- a/tests/components/min_max/test_init.py +++ b/tests/components/min_max/test_init.py @@ -11,6 +11,7 @@ from tests.common import MockConfigEntry @pytest.mark.parametrize("platform", ("sensor",)) async def test_setup_and_remove_config_entry( hass: HomeAssistant, + entity_registry: er.EntityRegistry, platform: str, ) -> None: """Test setting up and removing a config entry.""" @@ -19,7 +20,6 @@ async def test_setup_and_remove_config_entry( input_sensors = ["sensor.input_one", "sensor.input_two"] - registry = er.async_get(hass) min_max_entity_id = f"{platform}.my_min_max" # Setup the config entry @@ -39,7 +39,7 @@ async def test_setup_and_remove_config_entry( await hass.async_block_till_done() # Check the entity is registered in the entity registry - assert registry.async_get(min_max_entity_id) is not None + assert entity_registry.async_get(min_max_entity_id) is not None # Check the platform is setup correctly state = hass.states.get(min_max_entity_id) @@ -51,4 +51,4 @@ async def test_setup_and_remove_config_entry( # Check the state and entity registry entry are removed assert hass.states.get(min_max_entity_id) is None - assert registry.async_get(min_max_entity_id) is None + assert entity_registry.async_get(min_max_entity_id) is None diff --git a/tests/components/min_max/test_sensor.py b/tests/components/min_max/test_sensor.py index a742260daff..acd42f9355e 100644 --- a/tests/components/min_max/test_sensor.py +++ b/tests/components/min_max/test_sensor.py @@ -60,7 +60,9 @@ async def test_default_name_sensor(hass: HomeAssistant) -> None: assert entity_ids[2] == state.attributes.get("min_entity_id") -async def test_min_sensor(hass: HomeAssistant) -> None: +async def test_min_sensor( + hass: HomeAssistant, entity_registry: 
er.EntityRegistry +) -> None: """Test the min sensor.""" config = { "sensor": { @@ -87,8 +89,7 @@ async def test_min_sensor(hass: HomeAssistant) -> None: assert entity_ids[2] == state.attributes.get("min_entity_id") assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - entity_reg = er.async_get(hass) - entity = entity_reg.async_get("sensor.test_min") + entity = entity_registry.async_get("sensor.test_min") assert entity.unique_id == "very_unique_id" @@ -470,7 +471,9 @@ async def test_sensor_incorrect_state( assert "Unable to store state. Only numerical states are supported" in caplog.text -async def test_sum_sensor(hass: HomeAssistant) -> None: +async def test_sum_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test the sum sensor.""" config = { "sensor": { @@ -496,8 +499,7 @@ async def test_sum_sensor(hass: HomeAssistant) -> None: assert str(float(SUM_VALUE)) == state.state assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - entity_reg = er.async_get(hass) - entity = entity_reg.async_get("sensor.test_sum") + entity = entity_registry.async_get("sensor.test_sum") assert entity.unique_id == "very_unique_id_sum_sensor" diff --git a/tests/components/minecraft_server/test_init.py b/tests/components/minecraft_server/test_init.py index 09e411f0b62..018fdac542e 100644 --- a/tests/components/minecraft_server/test_init.py +++ b/tests/components/minecraft_server/test_init.py @@ -178,7 +178,10 @@ async def test_setup_entry_not_ready( async def test_entry_migration( - hass: HomeAssistant, v1_mock_config_entry: MockConfigEntry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + v1_mock_config_entry: MockConfigEntry, ) -> None: """Test entry migration from version 1 to 3, where host and port is required for the connection to the server.""" v1_mock_config_entry.add_to_hass(hass) @@ -218,12 +221,10 @@ async def test_entry_migration( assert 
migrated_config_entry.state == ConfigEntryState.LOADED # Test migrated device entry. - device_registry = dr.async_get(hass) device_entry = device_registry.async_get(device_entry_id) assert device_entry.identifiers == {(DOMAIN, migrated_config_entry.entry_id)} # Test migrated sensor entity entries. - entity_registry = er.async_get(hass) for mapping in sensor_entity_id_key_mapping_list: entity_entry = entity_registry.async_get(mapping["entity_id"]) assert ( diff --git a/tests/components/mobile_app/test_binary_sensor.py b/tests/components/mobile_app/test_binary_sensor.py index b8a6cbb6db6..fe3510865fc 100644 --- a/tests/components/mobile_app/test_binary_sensor.py +++ b/tests/components/mobile_app/test_binary_sensor.py @@ -9,7 +9,10 @@ from homeassistant.helpers import device_registry as dr async def test_sensor( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + create_registrations, + webhook_client, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -77,8 +80,7 @@ async def test_sensor( assert updated_entity.state == "off" assert "foo" not in updated_entity.attributes - dev_reg = dr.async_get(hass) - assert len(dev_reg.devices) == len(create_registrations) + assert len(device_registry.devices) == len(create_registrations) # Reload to verify state is restored config_entry = hass.config_entries.async_entries("mobile_app")[1] diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index 8b034fb4ba9..59f2a130737 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -28,14 +28,16 @@ async def test_unload_unloads( assert len(calls) == 1 -async def test_remove_entry(hass: HomeAssistant, create_registrations) -> None: +async def test_remove_entry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + 
create_registrations, +) -> None: """Test we clean up when we remove entry.""" for config_entry in hass.config_entries.async_entries("mobile_app"): await hass.config_entries.async_remove(config_entry.entry_id) assert config_entry.data["webhook_id"] in hass.data[DOMAIN][DATA_DELETED_IDS] - dev_reg = dr.async_get(hass) - assert len(dev_reg.devices) == 0 - - ent_reg = er.async_get(hass) - assert len(ent_reg.entities) == 0 + assert len(device_registry.devices) == 0 + assert len(entity_registry.entities) == 0 diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index 8c8bf45fde2..f7c4a5690db 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -25,6 +25,8 @@ from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM ) async def test_sensor( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, create_registrations, webhook_client, unit_system, @@ -77,9 +79,7 @@ async def test_sensor( assert entity.state == state1 assert ( - er.async_get(hass) - .async_get("sensor.test_1_battery_temperature") - .entity_category + entity_registry.async_get("sensor.test_1_battery_temperature").entity_category == "diagnostic" ) @@ -109,8 +109,7 @@ async def test_sensor( assert updated_entity.state == state2 assert "foo" not in updated_entity.attributes - dev_reg = dr.async_get(hass) - assert len(dev_reg.devices) == len(create_registrations) + assert len(device_registry.devices) == len(create_registrations) # Reload to verify state is restored config_entry = hass.config_entries.async_entries("mobile_app")[1] @@ -503,7 +502,10 @@ async def test_sensor_datetime( async def test_default_disabling_entity( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + create_registrations, + webhook_client, ) -> None: """Test that sensors can be disabled by default upon 
registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -532,13 +534,16 @@ async def test_default_disabling_entity( assert entity is None assert ( - er.async_get(hass).async_get("sensor.test_1_battery_state").disabled_by + entity_registry.async_get("sensor.test_1_battery_state").disabled_by == er.RegistryEntryDisabler.INTEGRATION ) async def test_updating_disabled_sensor( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + create_registrations, + webhook_client, ) -> None: """Test that sensors return error if disabled in instance.""" webhook_id = create_registrations[1]["webhook_id"] @@ -580,7 +585,7 @@ async def test_updating_disabled_sensor( assert json["battery_state"]["success"] is True assert "is_disabled" not in json["battery_state"] - er.async_get(hass).async_update_entity( + entity_registry.async_update_entity( "sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER ) diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index 9f6aec404e2..6fe272fbc40 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -854,12 +854,13 @@ async def test_webhook_camera_stream_stream_available_but_errors( async def test_webhook_handle_scan_tag( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + create_registrations, + webhook_client, ) -> None: """Test that we can scan tags.""" - device = dr.async_get(hass).async_get_device( - identifiers={(DOMAIN, "mock-device-id")} - ) + device = device_registry.async_get_device(identifiers={(DOMAIN, "mock-device-id")}) assert device is not None events = async_capture_events(hass, EVENT_TAG_SCANNED) @@ -920,7 +921,10 @@ async def test_register_sensor_limits_state_class( async def test_reregister_sensor( - hass: HomeAssistant, create_registrations, webhook_client + 
hass: HomeAssistant, + entity_registry: er.EntityRegistry, + create_registrations, + webhook_client, ) -> None: """Test that we can add more info in re-registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -941,8 +945,7 @@ async def test_reregister_sensor( assert reg_resp.status == HTTPStatus.CREATED - ent_reg = er.async_get(hass) - entry = ent_reg.async_get("sensor.test_1_battery_state") + entry = entity_registry.async_get("sensor.test_1_battery_state") assert entry.original_name == "Test 1 Battery State" assert entry.device_class is None assert entry.unit_of_measurement is None @@ -970,7 +973,7 @@ async def test_reregister_sensor( ) assert reg_resp.status == HTTPStatus.CREATED - entry = ent_reg.async_get("sensor.test_1_battery_state") + entry = entity_registry.async_get("sensor.test_1_battery_state") assert entry.original_name == "Test 1 New Name" assert entry.device_class == "battery" assert entry.unit_of_measurement == "%" @@ -992,7 +995,7 @@ async def test_reregister_sensor( ) assert reg_resp.status == HTTPStatus.CREATED - entry = ent_reg.async_get("sensor.test_1_battery_state") + entry = entity_registry.async_get("sensor.test_1_battery_state") assert entry.disabled_by is None reg_resp = await webhook_client.post( @@ -1014,7 +1017,7 @@ async def test_reregister_sensor( ) assert reg_resp.status == HTTPStatus.CREATED - entry = ent_reg.async_get("sensor.test_1_battery_state") + entry = entity_registry.async_get("sensor.test_1_battery_state") assert entry.original_name == "Test 1 New Name 2" assert entry.device_class is None assert entry.unit_of_measurement is None @@ -1067,6 +1070,7 @@ async def test_webhook_handle_conversation_process( async def test_sending_sensor_state( hass: HomeAssistant, + entity_registry: er.EntityRegistry, create_registrations, webhook_client, caplog: pytest.LogCaptureFixture, @@ -1105,8 +1109,7 @@ async def test_sending_sensor_state( assert reg_resp.status == HTTPStatus.CREATED - ent_reg = er.async_get(hass) - entry = 
ent_reg.async_get("sensor.test_1_battery_state") + entry = entity_registry.async_get("sensor.test_1_battery_state") assert entry.original_name == "Test 1 Battery State" assert entry.device_class is None assert entry.unit_of_measurement is None diff --git a/tests/components/modbus/test_binary_sensor.py b/tests/components/modbus/test_binary_sensor.py index 2069aa23b8f..a892dd205fb 100644 --- a/tests/components/modbus/test_binary_sensor.py +++ b/tests/components/modbus/test_binary_sensor.py @@ -445,11 +445,14 @@ async def test_config_virtual_binary_sensor(hass: HomeAssistant, mock_modbus) -> ], ) async def test_virtual_binary_sensor( - hass: HomeAssistant, expected, slaves, mock_do_cycle + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + expected, + slaves, + mock_do_cycle, ) -> None: """Run test for given config.""" assert hass.states.get(ENTITY_ID).state == expected - entity_registry = er.async_get(hass) for i, slave in enumerate(slaves): entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}_{i+1}".replace(" ", "_") diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 1c627faa09c..d0a4e23f780 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -247,7 +247,7 @@ async def test_config_sensor(hass: HomeAssistant, mock_modbus) -> None: }, ] }, - f"{TEST_ENTITY_NAME}: `{CONF_STRUCTURE}:` missing, demanded with `{CONF_DATA_TYPE}: {DataType.CUSTOM}`", + f"{TEST_ENTITY_NAME}: Size of structure is 0 bytes but `{CONF_COUNT}: 4` is 8 bytes", ), ( { @@ -276,7 +276,7 @@ async def test_config_sensor(hass: HomeAssistant, mock_modbus) -> None: }, ] }, - f"{TEST_ENTITY_NAME}: `{CONF_SWAP}:{CONF_SWAP_WORD}` cannot be combined with `{CONF_DATA_TYPE}: {DataType.CUSTOM}`", + f"{TEST_ENTITY_NAME}: `{CONF_SWAP}:{CONF_SWAP_WORD}` illegal with `{CONF_DATA_TYPE}: {DataType.CUSTOM}`", ), ], ) @@ -869,9 +869,10 @@ async def test_all_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None: 
), ], ) -async def test_virtual_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None: +async def test_virtual_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_do_cycle, expected +) -> None: """Run test for sensor.""" - entity_registry = er.async_get(hass) for i in range(0, len(expected)): entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") unique_id = f"{SLAVE_UNIQUE_ID}" diff --git a/tests/components/modern_forms/test_binary_sensor.py b/tests/components/modern_forms/test_binary_sensor.py index 6b64beb4f1a..3ea0fca99d5 100644 --- a/tests/components/modern_forms/test_binary_sensor.py +++ b/tests/components/modern_forms/test_binary_sensor.py @@ -11,20 +11,20 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_binary_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test the creation and values of the Modern Forms sensors.""" - registry = er.async_get(hass) - - registry.async_get_or_create( + entity_registry.async_get_or_create( BINARY_SENSOR_DOMAIN, DOMAIN, "AA:BB:CC:DD:EE:FF_light_sleep_timer_active", suggested_object_id="modernformsfan_light_sleep_timer_active", disabled_by=None, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( BINARY_SENSOR_DOMAIN, DOMAIN, "AA:BB:CC:DD:EE:FF_fan_sleep_timer_active", diff --git a/tests/components/modern_forms/test_fan.py b/tests/components/modern_forms/test_fan.py index 12083bb5ab6..9dc5ca9960f 100644 --- a/tests/components/modern_forms/test_fan.py +++ b/tests/components/modern_forms/test_fan.py @@ -35,13 +35,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_fan_state( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test the creation and values of the Modern 
Forms fans.""" await init_integration(hass, aioclient_mock) - entity_registry = er.async_get(hass) - state = hass.states.get("fan.modernformsfan_fan") assert state assert state.attributes.get(ATTR_PERCENTAGE) == 50 diff --git a/tests/components/modern_forms/test_init.py b/tests/components/modern_forms/test_init.py index b989f0f9ef3..9befb36d00d 100644 --- a/tests/components/modern_forms/test_init.py +++ b/tests/components/modern_forms/test_init.py @@ -38,13 +38,14 @@ async def test_unload_config_entry( async def test_fan_only_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test we set unique ID if not set yet.""" await init_integration( hass, aioclient_mock, mock_type=modern_forms_no_light_call_mock ) - entity_registry = er.async_get(hass) fan_entry = entity_registry.async_get("fan.modernformsfan_fan") assert fan_entry diff --git a/tests/components/modern_forms/test_light.py b/tests/components/modern_forms/test_light.py index 7e5b5e824f2..080290944b2 100644 --- a/tests/components/modern_forms/test_light.py +++ b/tests/components/modern_forms/test_light.py @@ -28,13 +28,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_light_state( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test the creation and values of the Modern Forms lights.""" await init_integration(hass, aioclient_mock) - entity_registry = er.async_get(hass) - state = hass.states.get("light.modernformsfan_light") assert state assert state.attributes.get(ATTR_BRIGHTNESS) == 128 diff --git a/tests/components/modern_forms/test_sensor.py b/tests/components/modern_forms/test_sensor.py index 7e3914cd7d9..279942f39a9 100644 --- a/tests/components/modern_forms/test_sensor.py +++ b/tests/components/modern_forms/test_sensor.py @@ -4,7 
+4,6 @@ from datetime import datetime from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ICON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from . import init_integration, modern_forms_timers_set_mock @@ -18,7 +17,6 @@ async def test_sensors( # await init_integration(hass, aioclient_mock) await init_integration(hass, aioclient_mock) - er.async_get(hass) # Light timer remaining time state = hass.states.get("sensor.modernformsfan_light_sleep_time") @@ -42,7 +40,6 @@ async def test_active_sensors( # await init_integration(hass, aioclient_mock) await init_integration(hass, aioclient_mock, mock_type=modern_forms_timers_set_mock) - er.async_get(hass) # Light timer remaining time state = hass.states.get("sensor.modernformsfan_light_sleep_time") diff --git a/tests/components/modern_forms/test_switch.py b/tests/components/modern_forms/test_switch.py index eae51d034f6..b0ddc31150b 100644 --- a/tests/components/modern_forms/test_switch.py +++ b/tests/components/modern_forms/test_switch.py @@ -22,13 +22,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_switch_state( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, ) -> None: """Test the creation and values of the Modern Forms switches.""" await init_integration(hass, aioclient_mock) - entity_registry = er.async_get(hass) - state = hass.states.get("switch.modernformsfan_away_mode") assert state assert state.attributes.get(ATTR_ICON) == "mdi:airplane-takeoff" diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index fb1c2ece186..c2f9ef01111 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -489,45 +489,45 @@ async def test_volume_up_down(hass: 
HomeAssistant) -> None: assert monoprice.zones[11].volume == 37 -async def test_first_run_with_available_zones(hass: HomeAssistant) -> None: +async def test_first_run_with_available_zones( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test first run with all zones available.""" monoprice = MockMonoprice() await _setup_monoprice(hass, monoprice) - registry = er.async_get(hass) - - entry = registry.async_get(ZONE_7_ID) + entry = entity_registry.async_get(ZONE_7_ID) assert not entry.disabled -async def test_first_run_with_failing_zones(hass: HomeAssistant) -> None: +async def test_first_run_with_failing_zones( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test first run with failed zones.""" monoprice = MockMonoprice() with patch.object(MockMonoprice, "zone_status", side_effect=SerialException): await _setup_monoprice(hass, monoprice) - registry = er.async_get(hass) - - entry = registry.async_get(ZONE_1_ID) + entry = entity_registry.async_get(ZONE_1_ID) assert not entry.disabled - entry = registry.async_get(ZONE_7_ID) + entry = entity_registry.async_get(ZONE_7_ID) assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION -async def test_not_first_run_with_failing_zone(hass: HomeAssistant) -> None: +async def test_not_first_run_with_failing_zone( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test first run with failed zones.""" monoprice = MockMonoprice() with patch.object(MockMonoprice, "zone_status", side_effect=SerialException): await _setup_monoprice_not_first_run(hass, monoprice) - registry = er.async_get(hass) - - entry = registry.async_get(ZONE_1_ID) + entry = entity_registry.async_get(ZONE_1_ID) assert not entry.disabled - entry = registry.async_get(ZONE_7_ID) + entry = entity_registry.async_get(ZONE_7_ID) assert not entry.disabled diff --git a/tests/components/moon/test_sensor.py b/tests/components/moon/test_sensor.py index 922febed3bf..38af8dcb912 
100644 --- a/tests/components/moon/test_sensor.py +++ b/tests/components/moon/test_sensor.py @@ -39,6 +39,8 @@ from tests.common import MockConfigEntry ) async def test_moon_day( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, mock_config_entry: MockConfigEntry, moon_value: float, native_value: str, @@ -70,13 +72,11 @@ async def test_moon_day( STATE_WANING_CRESCENT, ] - entity_registry = er.async_get(hass) entry = entity_registry.async_get("sensor.moon_phase") assert entry assert entry.unique_id == mock_config_entry.entry_id assert entry.translation_key == "phase" - device_registry = dr.async_get(hass) assert entry.device_id device_entry = device_registry.async_get(entry.device_id) assert device_entry diff --git a/tests/components/motioneye/test_camera.py b/tests/components/motioneye/test_camera.py index 5f5c5f7854e..5af8d4139eb 100644 --- a/tests/components/motioneye/test_camera.py +++ b/tests/components/motioneye/test_camera.py @@ -135,10 +135,12 @@ async def test_setup_camera_new_data_same(hass: HomeAssistant) -> None: assert hass.states.get(TEST_CAMERA_ENTITY_ID) -async def test_setup_camera_new_data_camera_removed(hass: HomeAssistant) -> None: +async def test_setup_camera_new_data_camera_removed( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Test a data refresh with a removed camera.""" - device_registry = dr.async_get(hass) - entity_registry = er.async_get(hass) client = create_mock_motioneye_client() config_entry = await setup_mock_motioneye_config_entry(hass, client=client) @@ -315,12 +317,15 @@ async def test_state_attributes(hass: HomeAssistant) -> None: assert not entity_state.attributes.get("motion_detection") -async def test_device_info(hass: HomeAssistant) -> None: +async def test_device_info( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Verify device information 
includes expected details.""" entry = await setup_mock_motioneye_config_entry(hass) device_identifier = get_motioneye_device_identifier(entry.entry_id, TEST_CAMERA_ID) - device_registry = dr.async_get(hass) device = device_registry.async_get_device(identifiers={device_identifier}) assert device @@ -330,7 +335,6 @@ async def test_device_info(hass: HomeAssistant) -> None: assert device.model == MOTIONEYE_MANUFACTURER assert device.name == TEST_CAMERA_NAME - entity_registry = er.async_get(hass) entities_from_device = [ entry.entity_id for entry in er.async_entries_for_device(entity_registry, device.id) diff --git a/tests/components/motioneye/test_media_source.py b/tests/components/motioneye/test_media_source.py index cb42e51f474..6b90870c4da 100644 --- a/tests/components/motioneye/test_media_source.py +++ b/tests/components/motioneye/test_media_source.py @@ -78,13 +78,14 @@ async def setup_media_source(hass) -> None: assert await async_setup_component(hass, "media_source", {}) -async def test_async_browse_media_success(hass: HomeAssistant) -> None: +async def test_async_browse_media_success( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test successful browse media.""" client = create_mock_motioneye_client() config = await setup_mock_motioneye_config_entry(hass, client=client) - device_registry = dr.async_get(hass) device = device_registry.async_get_or_create( config_entry_id=config.entry_id, identifiers={TEST_CAMERA_DEVICE_IDENTIFIER}, @@ -295,13 +296,14 @@ async def test_async_browse_media_success(hass: HomeAssistant) -> None: } -async def test_async_browse_media_images_success(hass: HomeAssistant) -> None: +async def test_async_browse_media_images_success( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test successful browse media of images.""" client = create_mock_motioneye_client() config = await setup_mock_motioneye_config_entry(hass, client=client) - device_registry = dr.async_get(hass) device = 
device_registry.async_get_or_create( config_entry_id=config.entry_id, identifiers={TEST_CAMERA_DEVICE_IDENTIFIER}, @@ -346,14 +348,15 @@ async def test_async_browse_media_images_success(hass: HomeAssistant) -> None: } -async def test_async_resolve_media_success(hass: HomeAssistant) -> None: +async def test_async_resolve_media_success( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test successful resolve media.""" client = create_mock_motioneye_client() config = await setup_mock_motioneye_config_entry(hass, client=client) - device_registry = dr.async_get(hass) device = device_registry.async_get_or_create( config_entry_id=config.entry_id, identifiers={TEST_CAMERA_DEVICE_IDENTIFIER}, @@ -380,14 +383,15 @@ async def test_async_resolve_media_success(hass: HomeAssistant) -> None: assert client.get_image_url.call_args == call(TEST_CAMERA_ID, "/foo.jpg") -async def test_async_resolve_media_failure(hass: HomeAssistant) -> None: +async def test_async_resolve_media_failure( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test failed resolve media calls.""" client = create_mock_motioneye_client() config = await setup_mock_motioneye_config_entry(hass, client=client) - device_registry = dr.async_get(hass) device = device_registry.async_get_or_create( config_entry_id=config.entry_id, identifiers={TEST_CAMERA_DEVICE_IDENTIFIER}, diff --git a/tests/components/motioneye/test_sensor.py b/tests/components/motioneye/test_sensor.py index 659738ef2c5..0892c0dead0 100644 --- a/tests/components/motioneye/test_sensor.py +++ b/tests/components/motioneye/test_sensor.py @@ -73,7 +73,11 @@ async def test_sensor_actions( assert entity_state.attributes.get(KEY_ACTIONS) is None -async def test_sensor_device_info(hass: HomeAssistant) -> None: +async def test_sensor_device_info( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Verify device information includes expected details.""" # 
Enable the action sensor (it is disabled by default). @@ -91,11 +95,9 @@ async def test_sensor_device_info(hass: HomeAssistant) -> None: config_entry.entry_id, TEST_CAMERA_ID ) - device_registry = dr.async_get(hass) device = device_registry.async_get_device(identifiers={device_identifer}) assert device - entity_registry = er.async_get(hass) entities_from_device = [ entry.entity_id for entry in er.async_entries_for_device(entity_registry, device.id) @@ -104,12 +106,13 @@ async def test_sensor_device_info(hass: HomeAssistant) -> None: async def test_sensor_actions_can_be_enabled( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Verify the action sensor can be enabled.""" client = create_mock_motioneye_client() await setup_mock_motioneye_config_entry(hass, client=client) - entity_registry = er.async_get(hass) entry = entity_registry.async_get(TEST_SENSOR_ACTION_ENTITY_ID) assert entry diff --git a/tests/components/motioneye/test_switch.py b/tests/components/motioneye/test_switch.py index cc193f5fb60..a6fbcc49052 100644 --- a/tests/components/motioneye/test_switch.py +++ b/tests/components/motioneye/test_switch.py @@ -152,7 +152,9 @@ async def test_switch_has_correct_entities(hass: HomeAssistant) -> None: async def test_disabled_switches_can_be_enabled( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Verify disabled switches can be enabled.""" client = create_mock_motioneye_client() @@ -165,7 +167,6 @@ async def test_disabled_switches_can_be_enabled( for switch_key in disabled_switch_keys: entity_id = f"{TEST_SWITCH_ENTITY_ID_BASE}_{switch_key}" - entity_registry = er.async_get(hass) entry = entity_registry.async_get(entity_id) assert entry assert entry.disabled @@ -191,19 +192,21 @@ async def test_disabled_switches_can_be_enabled( assert 
entity_state -async def test_switch_device_info(hass: HomeAssistant) -> None: +async def test_switch_device_info( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: """Verify device information includes expected details.""" config_entry = await setup_mock_motioneye_config_entry(hass) device_identifer = get_motioneye_device_identifier( config_entry.entry_id, TEST_CAMERA_ID ) - device_registry = dr.async_get(hass) device = device_registry.async_get_device(identifiers={device_identifer}) assert device - entity_registry = er.async_get(hass) entities_from_device = [ entry.entity_id for entry in er.async_entries_for_device(entity_registry, device.id) diff --git a/tests/components/motioneye/test_web_hooks.py b/tests/components/motioneye/test_web_hooks.py index 617f472ab4e..7c66645bb44 100644 --- a/tests/components/motioneye/test_web_hooks.py +++ b/tests/components/motioneye/test_web_hooks.py @@ -63,12 +63,13 @@ WEB_HOOK_FILE_STORED_QUERY_STRING = ( ) -async def test_setup_camera_without_webhook(hass: HomeAssistant) -> None: +async def test_setup_camera_without_webhook( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: """Test a camera with no webhook.""" client = create_mock_motioneye_client() config_entry = await setup_mock_motioneye_config_entry(hass, client=client) - device_registry = dr.async_get(hass) device = device_registry.async_get_device( identifiers={TEST_CAMERA_DEVICE_IDENTIFIER} ) @@ -95,6 +96,7 @@ async def test_setup_camera_without_webhook(hass: HomeAssistant) -> None: async def test_setup_camera_with_wrong_webhook( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, ) -> None: """Test camera with wrong web hook.""" wrong_url = "http://wrong-url" @@ -123,7 +125,6 @@ async def test_setup_camera_with_wrong_webhook( ) await hass.async_block_till_done() - device_registry = dr.async_get(hass) device = device_registry.async_get_device( 
identifiers={TEST_CAMERA_DEVICE_IDENTIFIER} ) @@ -151,6 +152,7 @@ async def test_setup_camera_with_wrong_webhook( async def test_setup_camera_with_old_webhook( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, ) -> None: """Verify that webhooks are overwritten if they are from this integration. @@ -176,7 +178,6 @@ async def test_setup_camera_with_old_webhook( ) assert client.async_set_camera.called - device_registry = dr.async_get(hass) device = device_registry.async_get_device( identifiers={TEST_CAMERA_DEVICE_IDENTIFIER} ) @@ -204,6 +205,7 @@ async def test_setup_camera_with_old_webhook( async def test_setup_camera_with_correct_webhook( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, ) -> None: """Verify that webhooks are not overwritten if they are already correct.""" @@ -212,7 +214,6 @@ async def test_setup_camera_with_correct_webhook( hass, data={CONF_URL: TEST_URL, CONF_WEBHOOK_ID: "webhook_secret_id"} ) - device_registry = dr.async_get(hass) device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={TEST_CAMERA_DEVICE_IDENTIFIER}, @@ -278,12 +279,13 @@ async def test_setup_camera_with_no_home_assistant_urls( async def test_good_query( - hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + hass_client_no_auth: ClientSessionGenerator, ) -> None: """Test good callbacks.""" await async_setup_component(hass, "http", {"http": {}}) - device_registry = dr.async_get(hass) client = create_mock_motioneye_client() config_entry = await setup_mock_motioneye_config_entry(hass, client=client) @@ -377,12 +379,13 @@ async def test_bad_query_cannot_decode( async def test_event_media_data( - hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + hass_client_no_auth: ClientSessionGenerator, ) -> None: """Test an event with a file path generates media data.""" await 
async_setup_component(hass, "http", {"http": {}}) - device_registry = dr.async_get(hass) client = create_mock_motioneye_client() config_entry = await setup_mock_motioneye_config_entry(hass, client=client) diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index f3bf92951b0..8db1c89bc40 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -3347,6 +3347,11 @@ async def test_set_state_via_stopped_state_no_position_topic( state = hass.states.get("cover.test") assert state.state == STATE_CLOSED + async_fire_mqtt_message(hass, "state-topic", "STOPPED") + + state = hass.states.get("cover.test") + assert state.state == STATE_CLOSED + @pytest.mark.parametrize( "hass_config", diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 485c2774f7b..90360bf7e3f 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -973,11 +973,12 @@ async def test_attach_remove_late2( async def test_entity_device_info_with_connection( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT device registry integration.""" await mqtt_mock_entry() - registry = dr.async_get(hass) data = json.dumps( { @@ -998,7 +999,7 @@ async def test_entity_device_info_with_connection( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device( + device = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12")} ) assert device is not None @@ -1011,11 +1012,12 @@ async def test_entity_device_info_with_connection( async def test_entity_device_info_with_identifier( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + 
device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT device registry integration.""" await mqtt_mock_entry() - registry = dr.async_get(hass) data = json.dumps( { @@ -1036,7 +1038,7 @@ async def test_entity_device_info_with_identifier( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.identifiers == {("mqtt", "helloworld")} assert device.manufacturer == "Whatever" @@ -1047,11 +1049,12 @@ async def test_entity_device_info_with_identifier( async def test_entity_device_info_update( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test device registry update.""" await mqtt_mock_entry() - registry = dr.async_get(hass) config = { "automation_type": "trigger", @@ -1072,7 +1075,7 @@ async def test_entity_device_info_update( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.name == "Beer" @@ -1081,7 +1084,7 @@ async def test_entity_device_info_update( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.name == "Milk" @@ -1390,14 +1393,15 @@ async def test_cleanup_device_with_entity2( async def 
test_trigger_debug_info( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test debug_info. This is a test helper for MQTT debug_info. """ await mqtt_mock_entry() - registry = dr.async_get(hass) config1 = { "platform": "mqtt", @@ -1429,7 +1433,7 @@ async def test_trigger_debug_info( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data) await hass.async_block_till_done() - device = registry.async_get_device( + device = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12")} ) assert device is not None diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index ed01b70e660..017d24a39ce 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -34,7 +34,8 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_fire_mqtt_message, - mock_entity_platform, + mock_config_flow, + mock_platform, ) from tests.typing import ( MqttMockHAClientGenerator, @@ -1499,7 +1500,7 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( ) -> None: """Check MQTT integration discovery subscribe and unsubscribe.""" mqtt_mock = await mqtt_mock_entry() - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) entry = hass.config_entries.async_entries("mqtt")[0] mqtt_mock().connected = True @@ -1522,7 +1523,7 @@ async def test_mqtt_integration_discovery_subscribe_unsubscribe( """Test mqtt step.""" return self.async_abort(reason="already_configured") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): await asyncio.sleep(0) assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called @@ -1552,7 +1553,7 @@ async def 
test_mqtt_discovery_unsubscribe_once( ) -> None: """Check MQTT integration discovery unsubscribe once.""" mqtt_mock = await mqtt_mock_entry() - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) entry = hass.config_entries.async_entries("mqtt")[0] mqtt_mock().connected = True @@ -1575,7 +1576,7 @@ async def test_mqtt_discovery_unsubscribe_once( """Test mqtt step.""" return self.async_abort(reason="already_configured") - with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + with mock_config_flow("comp", TestFlow): async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") async_fire_mqtt_message(hass, "comp/discovery/bla/config", "") await asyncio.sleep(0.1) diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 4c0e63fec1f..e178eb40c0e 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -500,14 +500,15 @@ async def test_entity_id_update_discovery_update( async def test_entity_device_info_with_hub( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT event device registry integration.""" await mqtt_mock_entry() other_config_entry = MockConfigEntry() other_config_entry.add_to_hass(hass) - registry = dr.async_get(hass) - hub = registry.async_get_or_create( + hub = device_registry.async_get_or_create( config_entry_id=other_config_entry.entry_id, connections=set(), identifiers={("mqtt", "hub-id")}, @@ -527,7 +528,7 @@ async def test_entity_device_info_with_hub( async_fire_mqtt_message(hass, "homeassistant/event/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.via_device_id == hub.id diff 
--git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 21d3bcce3a9..e7c4eba54e2 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -705,8 +705,9 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(fan.ATTR_PERCENTAGE) == 0 assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "low") + assert exc.value.translation_key == "not_valid_preset_mode" await common.async_set_preset_mode(hass, "fan.test", "whoosh") mqtt_mock.async_publish.assert_called_once_with( @@ -916,11 +917,13 @@ async def test_sending_mqtt_commands_and_optimistic_no_legacy( assert state.attributes.get(fan.ATTR_PERCENTAGE) == 0 assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "low") + assert exc.value.translation_key == "not_valid_preset_mode" - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "auto") + assert exc.value.translation_key == "not_valid_preset_mode" await common.async_set_preset_mode(hass, "fan.test", "whoosh") mqtt_mock.async_publish.assert_called_once_with( @@ -976,8 +979,9 @@ async def test_sending_mqtt_commands_and_optimistic_no_legacy( assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_turn_on(hass, "fan.test", preset_mode="freaking-high") + assert exc.value.translation_key == "not_valid_preset_mode" @pytest.mark.parametrize( @@ -1078,11 +1082,13 @@ async def test_sending_mqtt_command_templates_( assert state.attributes.get(fan.ATTR_PERCENTAGE) 
== 0 assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "low") + assert exc.value.translation_key == "not_valid_preset_mode" - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "medium") + assert exc.value.translation_key == "not_valid_preset_mode" await common.async_set_preset_mode(hass, "fan.test", "whoosh") mqtt_mock.async_publish.assert_called_once_with( @@ -1140,8 +1146,9 @@ async def test_sending_mqtt_command_templates_( assert state.state == STATE_ON assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_turn_on(hass, "fan.test", preset_mode="low") + assert exc.value.translation_key == "not_valid_preset_mode" @pytest.mark.parametrize( @@ -1176,8 +1183,9 @@ async def test_sending_mqtt_commands_and_optimistic_no_percentage_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "medium") + assert exc.value.translation_key == "not_valid_preset_mode" await common.async_set_preset_mode(hass, "fan.test", "whoosh") mqtt_mock.async_publish.assert_called_once_with( @@ -1276,11 +1284,10 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( assert state.state == STATE_OFF assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_turn_on(hass, "fan.test", preset_mode="auto") - assert mqtt_mock.async_publish.call_count == 1 - # We can turn on, but the invalid preset mode will raise - 
mqtt_mock.async_publish.assert_any_call("command-topic", "ON", 0, False) + assert exc.value.translation_key == "not_valid_preset_mode" + assert mqtt_mock.async_publish.call_count == 0 mqtt_mock.async_publish.reset_mock() await common.async_turn_on(hass, "fan.test", preset_mode="whoosh") @@ -1428,11 +1435,13 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( with pytest.raises(MultipleInvalid): await common.async_set_percentage(hass, "fan.test", 101) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "low") + assert exc.value.translation_key == "not_valid_preset_mode" - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "medium") + assert exc.value.translation_key == "not_valid_preset_mode" await common.async_set_preset_mode(hass, "fan.test", "whoosh") mqtt_mock.async_publish.assert_called_once_with( @@ -1452,8 +1461,9 @@ async def test_sending_mqtt_commands_and_explicit_optimistic( assert state.state == STATE_OFF assert state.attributes.get(ATTR_ASSUMED_STATE) - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await common.async_set_preset_mode(hass, "fan.test", "freaking-high") + assert exc.value.translation_key == "not_valid_preset_mode" mqtt_mock.async_publish.reset_mock() state = hass.states.get("fan.test") diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 5bb86662322..d31570548f0 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -3050,7 +3050,9 @@ async def test_mqtt_ws_get_device_debug_info_binary( async def test_debug_info_multiple_devices( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) 
-> None: """Test we get correct debug_info when multiple devices are present.""" await mqtt_mock_entry() @@ -3097,8 +3099,6 @@ async def test_debug_info_multiple_devices( }, ] - registry = dr.async_get(hass) - for dev in devices: data = json.dumps(dev["config"]) domain = dev["domain"] @@ -3109,7 +3109,7 @@ async def test_debug_info_multiple_devices( for dev in devices: domain = dev["domain"] id = dev["config"]["device"]["identifiers"][0] - device = registry.async_get_device(identifiers={("mqtt", id)}) + device = device_registry.async_get_device(identifiers={("mqtt", id)}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) @@ -3132,7 +3132,9 @@ async def test_debug_info_multiple_devices( async def test_debug_info_multiple_entities_triggers( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test we get correct debug_info for a device with multiple entities and triggers.""" await mqtt_mock_entry() @@ -3179,8 +3181,6 @@ async def test_debug_info_multiple_entities_triggers( }, ] - registry = dr.async_get(hass) - for c in config: data = json.dumps(c["config"]) domain = c["domain"] @@ -3190,7 +3190,7 @@ async def test_debug_info_multiple_entities_triggers( await hass.async_block_till_done() device_id = config[0]["config"]["device"]["identifiers"][0] - device = registry.async_get_device(identifiers={("mqtt", device_id)}) + device = device_registry.async_get_device(identifiers={("mqtt", device_id)}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) assert len(debug_info_data["entities"]) == 2 @@ -3253,7 +3253,9 @@ async def test_debug_info_non_mqtt( async def test_debug_info_wildcard( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: 
"""Test debug info.""" await mqtt_mock_entry() @@ -3264,13 +3266,11 @@ async def test_debug_info_wildcard( "unique_id": "veryunique", } - registry = dr.async_get(hass) - data = json.dumps(config) async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) @@ -3301,7 +3301,9 @@ async def test_debug_info_wildcard( async def test_debug_info_filter_same( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test debug info removes messages with same timestamp.""" await mqtt_mock_entry() @@ -3312,13 +3314,11 @@ async def test_debug_info_filter_same( "unique_id": "veryunique", } - registry = dr.async_get(hass) - data = json.dumps(config) async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) @@ -3361,7 +3361,9 @@ async def test_debug_info_filter_same( async def test_debug_info_same_topic( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test debug info.""" await mqtt_mock_entry() @@ -3373,13 +3375,11 @@ async def test_debug_info_same_topic( "unique_id": "veryunique", } - registry = dr.async_get(hass) - data = json.dumps(config) async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await 
hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) @@ -3415,7 +3415,9 @@ async def test_debug_info_same_topic( async def test_debug_info_qos_retain( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test debug info.""" await mqtt_mock_entry() @@ -3426,13 +3428,11 @@ async def test_debug_info_qos_retain( "unique_id": "veryunique", } - registry = dr.async_get(hass) - data = json.dumps(config) async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index e7471829856..82b0b3467f4 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -1785,6 +1785,24 @@ async def test_brightness_scale( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 + # Turn on the light with half brightness + async_fire_mqtt_message( + hass, "test_light_bright_scale", '{"state":"ON", "brightness": 50}' + ) + + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("brightness") == 129 + + # Test limiting max brightness + async_fire_mqtt_message( + hass, "test_light_bright_scale", '{"state":"ON", "brightness": 103}' + ) + + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("brightness") ==
255 + @pytest.mark.parametrize( "hass_config", @@ -1844,7 +1862,7 @@ async def test_white_scale( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("brightness") == 128 + assert state.attributes.get("brightness") == 129 @pytest.mark.parametrize( diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index 1ca9bf07d72..7a625a2f5f6 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -312,6 +312,7 @@ async def test_availability_with_shared_state_topic( @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) async def test_default_entity_and_device_name( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data, caplog: pytest.LogCaptureFixture, @@ -336,9 +337,7 @@ async def test_default_entity_and_device_name( hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() - registry = dr.async_get(hass) - - device = registry.async_get_device({("mqtt", "helloworld")}) + device = device_registry.async_get_device({("mqtt", "helloworld")}) assert device is not None assert device.name == device_name diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 0f1be02875c..e33d626c5d8 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -1134,14 +1134,15 @@ async def test_entity_id_update_discovery_update( async def test_entity_device_info_with_hub( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT sensor device registry integration.""" await mqtt_mock_entry() other_config_entry = MockConfigEntry() other_config_entry.add_to_hass(hass) - registry = dr.async_get(hass) - hub = registry.async_get_or_create( + hub = 
device_registry.async_get_or_create( config_entry_id=other_config_entry.entry_id, connections=set(), identifiers={("mqtt", "hub-id")}, @@ -1160,7 +1161,7 @@ async def test_entity_device_info_with_hub( async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.via_device_id == hub.id diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index 55eac636edb..0476c880b1a 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -444,11 +444,12 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async def test_entity_device_info_with_connection( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT device registry integration.""" await mqtt_mock_entry() - registry = dr.async_get(hass) data = json.dumps( { @@ -466,7 +467,7 @@ async def test_entity_device_info_with_connection( async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device( + device = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, "02:5b:26:a8:dc:12")} ) assert device is not None @@ -479,11 +480,12 @@ async def test_entity_device_info_with_connection( async def test_entity_device_info_with_identifier( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test MQTT device registry integration.""" await mqtt_mock_entry() - registry = dr.async_get(hass) data = json.dumps( { @@ -501,7 +503,7 @@ async def 
test_entity_device_info_with_identifier( async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.identifiers == {("mqtt", "helloworld")} assert device.manufacturer == "Whatever" @@ -512,11 +514,12 @@ async def test_entity_device_info_with_identifier( async def test_entity_device_info_update( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test device registry update.""" await mqtt_mock_entry() - registry = dr.async_get(hass) config = { "topic": "test-topic", @@ -534,7 +537,7 @@ async def test_entity_device_info_update( async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.name == "Beer" @@ -543,7 +546,7 @@ async def test_entity_device_info_update( async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data) await hass.async_block_till_done() - device = registry.async_get_device(identifiers={("mqtt", "helloworld")}) + device = device_registry.async_get_device(identifiers={("mqtt", "helloworld")}) assert device is not None assert device.name == "Milk" diff --git a/tests/components/mqtt_room/test_sensor.py b/tests/components/mqtt_room/test_sensor.py index 72540f49ca7..822e028f4f6 100644 --- a/tests/components/mqtt_room/test_sensor.py +++ b/tests/components/mqtt_room/test_sensor.py @@ -118,7 +118,7 @@ async def test_room_update(hass: HomeAssistant, mqtt_mock: MqttMockHAClient) -> async def test_unique_id_is_set( - hass: HomeAssistant, mqtt_mock: 
MqttMockHAClient + hass: HomeAssistant, entity_registry: er.EntityRegistry, mqtt_mock: MqttMockHAClient ) -> None: """Test the updating between rooms.""" unique_name = "my_unique_name_0123456789" @@ -141,6 +141,5 @@ async def test_unique_id_is_set( state = hass.states.get(SENSOR_STATE) assert state.state is not None - entity_registry = er.async_get(hass) entry = entity_registry.async_get(SENSOR_STATE) assert entry.unique_id == unique_name diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index 883a94ea02e..64fbb61aac3 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -59,7 +59,8 @@ async def serial_transport_fixture( ) as transport_class, patch("mysensors.task.OTAFirmware", autospec=True), patch( "mysensors.task.load_fw", autospec=True ), patch( - "mysensors.task.Persistence", autospec=True + "mysensors.task.Persistence", + autospec=True, ) as persistence_class: persistence = persistence_class.return_value diff --git a/tests/components/mysensors/test_init.py b/tests/components/mysensors/test_init.py index 9d1867b3158..fd61e27a663 100644 --- a/tests/components/mysensors/test_init.py +++ b/tests/components/mysensors/test_init.py @@ -15,6 +15,8 @@ from tests.typing import WebSocketGenerator async def test_remove_config_entry_device( hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, gps_sensor: Sensor, integration: MockConfigEntry, gateway: BaseSyncGateway, @@ -27,11 +29,9 @@ async def test_remove_config_entry_device( assert await async_setup_component(hass, "config", {}) await hass.async_block_till_done() - device_registry = dr.async_get(hass) device_entry = device_registry.async_get_device( identifiers={(DOMAIN, f"{config_entry.entry_id}-{node_id}")} ) - entity_registry = er.async_get(hass) state = hass.states.get(entity_id) assert gateway.sensors diff --git a/tests/components/nam/test_button.py 
b/tests/components/nam/test_button.py index 4a1083874d0..ab4e46975f9 100644 --- a/tests/components/nam/test_button.py +++ b/tests/components/nam/test_button.py @@ -10,10 +10,8 @@ from homeassistant.util import dt as dt_util from . import init_integration -async def test_button(hass: HomeAssistant) -> None: +async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test states of the button.""" - registry = er.async_get(hass) - await init_integration(hass) state = hass.states.get("button.nettigo_air_monitor_restart") @@ -21,7 +19,7 @@ async def test_button(hass: HomeAssistant) -> None: assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_DEVICE_CLASS) == ButtonDeviceClass.RESTART - entry = registry.async_get("button.nettigo_air_monitor_restart") + entry = entity_registry.async_get("button.nettigo_air_monitor_restart") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-restart" diff --git a/tests/components/nam/test_init.py b/tests/components/nam/test_init.py index dbd1c152d6b..63034d5b075 100644 --- a/tests/components/nam/test_init.py +++ b/tests/components/nam/test_init.py @@ -93,11 +93,11 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert not hass.data.get(DOMAIN) -async def test_remove_air_quality_entities(hass: HomeAssistant) -> None: +async def test_remove_air_quality_entities( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test remove air_quality entities from registry.""" - registry = er.async_get(hass) - - registry.async_get_or_create( + entity_registry.async_get_or_create( AIR_QUALITY_PLATFORM, DOMAIN, "aa:bb:cc:dd:ee:ff-sds011", @@ -105,7 +105,7 @@ async def test_remove_air_quality_entities(hass: HomeAssistant) -> None: disabled_by=None, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( AIR_QUALITY_PLATFORM, DOMAIN, "aa:bb:cc:dd:ee:ff-sps30", @@ -115,8 +115,8 @@ async def test_remove_air_quality_entities(hass: HomeAssistant) -> None: 
await init_integration(hass) - entry = registry.async_get("air_quality.nettigo_air_monitor_sds011") + entry = entity_registry.async_get("air_quality.nettigo_air_monitor_sds011") assert entry is None - entry = registry.async_get("air_quality.nettigo_air_monitor_sps30") + entry = entity_registry.async_get("air_quality.nettigo_air_monitor_sps30") assert entry is None diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index 4f1b95ea206..50cf3aba659 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -35,11 +35,9 @@ from . import INCOMPLETE_NAM_DATA, init_integration, nam_data from tests.common import async_fire_time_changed -async def test_sensor(hass: HomeAssistant) -> None: +async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test states of the air_quality.""" - registry = er.async_get(hass) - - registry.async_get_or_create( + entity_registry.async_get_or_create( SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff-signal", @@ -47,7 +45,7 @@ async def test_sensor(hass: HomeAssistant) -> None: disabled_by=None, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff-uptime", @@ -67,7 +65,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = registry.async_get("sensor.nettigo_air_monitor_bme280_humidity") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_humidity") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_humidity" @@ -78,7 +76,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = 
registry.async_get("sensor.nettigo_air_monitor_bme280_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_temperature" @@ -89,7 +87,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - entry = registry.async_get("sensor.nettigo_air_monitor_bme280_pressure") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_pressure") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_pressure" @@ -100,7 +98,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = registry.async_get("sensor.nettigo_air_monitor_bmp180_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp180_temperature" @@ -111,7 +109,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - entry = registry.async_get("sensor.nettigo_air_monitor_bmp180_pressure") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_pressure") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp180_pressure" @@ -122,7 +120,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = registry.async_get("sensor.nettigo_air_monitor_bmp280_temperature") + entry = 
entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_temperature" @@ -133,7 +131,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - entry = registry.async_get("sensor.nettigo_air_monitor_bmp280_pressure") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_pressure") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_pressure" @@ -144,7 +142,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = registry.async_get("sensor.nettigo_air_monitor_sht3x_humidity") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_humidity") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_humidity" @@ -155,7 +153,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = registry.async_get("sensor.nettigo_air_monitor_sht3x_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_temperature" @@ -166,7 +164,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_humidity" @@ -177,7 +175,7 @@ 
async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_temperature" @@ -188,7 +186,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = registry.async_get("sensor.nettigo_air_monitor_heca_humidity") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_humidity") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_humidity" @@ -199,7 +197,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = registry.async_get("sensor.nettigo_air_monitor_heca_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_temperature" @@ -213,7 +211,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == SIGNAL_STRENGTH_DECIBELS_MILLIWATT ) - entry = registry.async_get("sensor.nettigo_air_monitor_signal_strength") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_signal_strength") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-signal" @@ -226,7 +224,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP assert state.attributes.get(ATTR_STATE_CLASS) is None - entry = registry.async_get("sensor.nettigo_air_monitor_uptime") + entry = 
entity_registry.async_get("sensor.nettigo_air_monitor_uptime") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-uptime" @@ -245,7 +243,7 @@ async def test_sensor(hass: HomeAssistant) -> None: ] assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level" ) assert entry @@ -259,7 +257,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.state == "19" assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index" ) assert entry @@ -275,7 +273,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm10") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm10") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p1" @@ -289,7 +287,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm2_5") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm2_5") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p2" @@ -303,7 +301,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm1") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm1") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p0" @@ -317,7 +315,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_sds011_pm10") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm10") assert entry 
assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p1" @@ -328,7 +326,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.state == "19" assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_sds011_common_air_quality_index" ) assert entry @@ -349,7 +347,7 @@ async def test_sensor(hass: HomeAssistant) -> None: ] assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_sds011_common_air_quality_index_level" ) assert entry @@ -366,7 +364,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_sds011_pm2_5") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm2_5") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p2" @@ -375,7 +373,7 @@ async def test_sensor(hass: HomeAssistant) -> None: assert state.state == "54" assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_sps30_common_air_quality_index" ) assert entry @@ -396,7 +394,7 @@ async def test_sensor(hass: HomeAssistant) -> None: ] assert state.attributes.get(ATTR_ICON) == "mdi:air-filter" - entry = registry.async_get( + entry = entity_registry.async_get( "sensor.nettigo_air_monitor_sps30_common_air_quality_index_level" ) assert entry @@ -413,7 +411,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_sps30_pm1") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm1") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p0" @@ -427,7 +425,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == 
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_sps30_pm10") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm10") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p1" @@ -441,7 +439,7 @@ async def test_sensor(hass: HomeAssistant) -> None: == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER ) - entry = registry.async_get("sensor.nettigo_air_monitor_sps30_pm2_5") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm2_5") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p2" @@ -455,7 +453,7 @@ async def test_sensor(hass: HomeAssistant) -> None: ) assert state.attributes.get(ATTR_ICON) == "mdi:molecule" - entry = registry.async_get("sensor.nettigo_air_monitor_sps30_pm4") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm4") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p4" @@ -468,24 +466,27 @@ async def test_sensor(hass: HomeAssistant) -> None: state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == CONCENTRATION_PARTS_PER_MILLION ) - entry = registry.async_get("sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide") + entry = entity_registry.async_get( + "sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide" + ) assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-mhz14a_carbon_dioxide" -async def test_sensor_disabled(hass: HomeAssistant) -> None: +async def test_sensor_disabled( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test sensor disabled by default.""" await init_integration(hass) - registry = er.async_get(hass) - entry = registry.async_get("sensor.nettigo_air_monitor_signal_strength") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_signal_strength") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-signal" assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION # Test enabling entity - updated_entry = registry.async_update_entity( 
+ updated_entry = entity_registry.async_update_entity( entry.entity_id, **{"disabled_by": None} ) @@ -574,11 +575,11 @@ async def test_manual_update_entity(hass: HomeAssistant) -> None: assert mock_get_data.call_count == 1 -async def test_unique_id_migration(hass: HomeAssistant) -> None: +async def test_unique_id_migration( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: """Test states of the unique_id migration.""" - registry = er.async_get(hass) - - registry.async_get_or_create( + entity_registry.async_get_or_create( SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff-temperature", @@ -586,7 +587,7 @@ async def test_unique_id_migration(hass: HomeAssistant) -> None: disabled_by=None, ) - registry.async_get_or_create( + entity_registry.async_get_or_create( SENSOR_DOMAIN, DOMAIN, "aa:bb:cc:dd:ee:ff-humidity", @@ -596,10 +597,10 @@ async def test_unique_id_migration(hass: HomeAssistant) -> None: await init_integration(hass) - entry = registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_temperature" - entry = registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") + entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") assert entry assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_humidity" diff --git a/tests/components/netatmo/common.py b/tests/components/netatmo/common.py index 0776b80a3cd..61a7bc2354d 100644 --- a/tests/components/netatmo/common.py +++ b/tests/components/netatmo/common.py @@ -97,6 +97,6 @@ def selected_platforms(platforms): ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): yield diff --git a/tests/components/netatmo/test_api.py b/tests/components/netatmo/test_api.py new 
file mode 100644 index 00000000000..e2d495555c6 --- /dev/null +++ b/tests/components/netatmo/test_api.py @@ -0,0 +1,22 @@ +"""The tests for the Netatmo api.""" + +from pyatmo.const import ALL_SCOPES + +from homeassistant.components import cloud +from homeassistant.components.netatmo import api +from homeassistant.components.netatmo.const import API_SCOPES_EXCLUDED_FROM_CLOUD + + +async def test_get_api_scopes_cloud() -> None: + """Test method to get API scopes when using cloud auth implementation.""" + result = api.get_api_scopes(cloud.DOMAIN) + + for scope in API_SCOPES_EXCLUDED_FROM_CLOUD: + assert scope not in result + + +async def test_get_api_scopes_other() -> None: + """Test method to get API scopes when using cloud auth implementation.""" + result = api.get_api_scopes("netatmo_239846i2f0j2") + + assert sorted(ALL_SCOPES) == result diff --git a/tests/components/netatmo/test_camera.py b/tests/components/netatmo/test_camera.py index e9a66cfefc8..6dcc11d31ab 100644 --- a/tests/components/netatmo/test_camera.py +++ b/tests/components/netatmo/test_camera.py @@ -388,7 +388,7 @@ async def test_camera_reconnect_webhook(hass: HomeAssistant, config_entry) -> No ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ) as mock_webhook: mock_auth.return_value.async_post_api_request.side_effect = fake_post mock_auth.return_value.async_addwebhook.side_effect = AsyncMock() @@ -482,7 +482,7 @@ async def test_setup_component_no_devices(hass: HomeAssistant, config_entry) -> ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = fake_post_no_data 
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock() @@ -522,7 +522,7 @@ async def test_camera_image_raises_exception( ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = fake_post mock_auth.return_value.async_get_image.side_effect = fake_post diff --git a/tests/components/netatmo/test_climate.py b/tests/components/netatmo/test_climate.py index 99000403a38..848aad331bd 100644 --- a/tests/components/netatmo/test_climate.py +++ b/tests/components/netatmo/test_climate.py @@ -22,8 +22,14 @@ from homeassistant.components.netatmo.climate import PRESET_FROST_GUARD, PRESET_ from homeassistant.components.netatmo.const import ( ATTR_END_DATETIME, ATTR_SCHEDULE_NAME, + ATTR_TARGET_TEMPERATURE, + ATTR_TIME_PERIOD, + DOMAIN as NETATMO_DOMAIN, + SERVICE_CLEAR_TEMPERATURE_SETTING, SERVICE_SET_PRESET_MODE_WITH_END_DATETIME, SERVICE_SET_SCHEDULE, + SERVICE_SET_TEMPERATURE_WITH_END_DATETIME, + SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD, ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant @@ -359,6 +365,203 @@ async def test_service_preset_modes_thermostat( assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 30 +async def test_service_set_temperature_with_end_datetime( + hass: HomeAssistant, config_entry, netatmo_auth +) -> None: + """Test service setting temperature with an end datetime.""" + with selected_platforms(["climate"]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.async_block_till_done() + + webhook_id = config_entry.data[CONF_WEBHOOK_ID] + climate_entity_livingroom = "climate.livingroom" + + assert hass.states.get(climate_entity_livingroom).state == "auto" + + # Test service setting the 
temperature without an end datetime + await hass.services.async_call( + NETATMO_DOMAIN, + SERVICE_SET_TEMPERATURE_WITH_END_DATETIME, + { + ATTR_ENTITY_ID: climate_entity_livingroom, + ATTR_TARGET_TEMPERATURE: 25, + ATTR_END_DATETIME: "2023-11-17 12:23:00", + }, + blocking=True, + ) + await hass.async_block_till_done() + + # Test webhook room mode change to "manual" + response = { + "room_id": "2746182631", + "home": { + "id": "91763b24c43d3e344f424e8b", + "name": "MYHOME", + "country": "DE", + "rooms": [ + { + "id": "2746182631", + "name": "Livingroom", + "type": "livingroom", + "therm_setpoint_mode": "manual", + "therm_setpoint_temperature": 25, + "therm_setpoint_end_time": 1612749189, + } + ], + "modules": [ + {"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"} + ], + }, + "mode": "manual", + "event_type": "set_point", + "push_type": "display_change", + } + await simulate_webhook(hass, webhook_id, response) + + assert hass.states.get(climate_entity_livingroom).state == "heat" + assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 25 + + +async def test_service_set_temperature_with_time_period( + hass: HomeAssistant, config_entry, netatmo_auth +) -> None: + """Test service setting temperature with an end datetime.""" + with selected_platforms(["climate"]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.async_block_till_done() + + webhook_id = config_entry.data[CONF_WEBHOOK_ID] + climate_entity_livingroom = "climate.livingroom" + + assert hass.states.get(climate_entity_livingroom).state == "auto" + + # Test service setting the temperature without an end datetime + await hass.services.async_call( + NETATMO_DOMAIN, + SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD, + { + ATTR_ENTITY_ID: climate_entity_livingroom, + ATTR_TARGET_TEMPERATURE: 25, + ATTR_TIME_PERIOD: "02:24:00", + }, + blocking=True, + ) + await hass.async_block_till_done() + + # Test webhook room mode change to "manual" + 
response = { + "room_id": "2746182631", + "home": { + "id": "91763b24c43d3e344f424e8b", + "name": "MYHOME", + "country": "DE", + "rooms": [ + { + "id": "2746182631", + "name": "Livingroom", + "type": "livingroom", + "therm_setpoint_mode": "manual", + "therm_setpoint_temperature": 25, + "therm_setpoint_end_time": 1612749189, + } + ], + "modules": [ + {"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"} + ], + }, + "mode": "manual", + "event_type": "set_point", + "push_type": "display_change", + } + await simulate_webhook(hass, webhook_id, response) + + assert hass.states.get(climate_entity_livingroom).state == "heat" + assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 25 + + +async def test_service_clear_temperature_setting( + hass: HomeAssistant, config_entry, netatmo_auth +) -> None: + """Test service clearing temperature setting.""" + with selected_platforms(["climate"]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + await hass.async_block_till_done() + + webhook_id = config_entry.data[CONF_WEBHOOK_ID] + climate_entity_livingroom = "climate.livingroom" + + assert hass.states.get(climate_entity_livingroom).state == "auto" + + # Simulate a room thermostat change to manual boost + response = { + "room_id": "2746182631", + "home": { + "id": "91763b24c43d3e344f424e8b", + "name": "MYHOME", + "country": "DE", + "rooms": [ + { + "id": "2746182631", + "name": "Livingroom", + "type": "livingroom", + "therm_setpoint_mode": "manual", + "therm_setpoint_temperature": 25, + "therm_setpoint_end_time": 1612749189, + } + ], + "modules": [ + {"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"} + ], + }, + "mode": "manual", + "event_type": "set_point", + "push_type": "display_change", + } + await simulate_webhook(hass, webhook_id, response) + + assert hass.states.get(climate_entity_livingroom).state == "heat" + assert hass.states.get(climate_entity_livingroom).attributes["temperature"] == 25 + 
+ # Test service setting the temperature without an end datetime + await hass.services.async_call( + NETATMO_DOMAIN, + SERVICE_CLEAR_TEMPERATURE_SETTING, + {ATTR_ENTITY_ID: climate_entity_livingroom}, + blocking=True, + ) + await hass.async_block_till_done() + + # Test webhook room mode change to "home" + response = { + "room_id": "2746182631", + "home": { + "id": "91763b24c43d3e344f424e8b", + "name": "MYHOME", + "country": "DE", + "rooms": [ + { + "id": "2746182631", + "name": "Livingroom", + "type": "livingroom", + "therm_setpoint_mode": "home", + } + ], + "modules": [ + {"id": "12:34:56:00:01:ae", "name": "Livingroom", "type": "NATherm1"} + ], + }, + "mode": "home", + "event_type": "cancel_set_point", + "push_type": "display_change", + } + await simulate_webhook(hass, webhook_id, response) + + assert hass.states.get(climate_entity_livingroom).state == "auto" + + async def test_webhook_event_handling_no_data( hass: HomeAssistant, config_entry, netatmo_auth ) -> None: diff --git a/tests/components/netatmo/test_diagnostics.py b/tests/components/netatmo/test_diagnostics.py index 0ece935abcb..19f83830a4e 100644 --- a/tests/components/netatmo/test_diagnostics.py +++ b/tests/components/netatmo/test_diagnostics.py @@ -25,7 +25,7 @@ async def test_entry_diagnostics( ) as mock_auth, patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = fake_post_request mock_auth.return_value.async_addwebhook.side_effect = AsyncMock() diff --git a/tests/components/netatmo/test_init.py b/tests/components/netatmo/test_init.py index e04295ae668..75b1e9e47e6 100644 --- a/tests/components/netatmo/test_init.py +++ b/tests/components/netatmo/test_init.py @@ -205,7 +205,7 @@ async def test_setup_with_cloud(hass: HomeAssistant, config_entry) -> None: ), patch( 
"homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = fake_post_request assert await async_setup_component( @@ -271,7 +271,7 @@ async def test_setup_with_cloudhook(hass: HomeAssistant) -> None: ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = fake_post_request mock_auth.return_value.async_addwebhook.side_effect = AsyncMock() diff --git a/tests/components/netatmo/test_light.py b/tests/components/netatmo/test_light.py index 83218b6d6d1..b6df9191976 100644 --- a/tests/components/netatmo/test_light.py +++ b/tests/components/netatmo/test_light.py @@ -103,7 +103,7 @@ async def test_setup_component_no_devices(hass: HomeAssistant, config_entry) -> ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.netatmo.webhook_generate_url" + "homeassistant.components.netatmo.webhook_generate_url", ): mock_auth.return_value.async_post_api_request.side_effect = ( fake_post_request_no_data diff --git a/tests/components/nws/snapshots/test_weather.ambr b/tests/components/nws/snapshots/test_weather.ambr index 0dddca954be..0db2311085c 100644 --- a/tests/components/nws/snapshots/test_weather.ambr +++ b/tests/components/nws/snapshots/test_weather.ambr @@ -103,6 +103,309 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 
75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].1 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].2 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].3 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].4 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[forecast].5 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + ]), + }), + }) +# --- +# name: 
test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].3 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].4 + dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecast].5 + 
dict({ + 'forecast': list([ + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].1 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].2 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'is_daytime': False, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].3 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].4 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + dict({ + 'condition': 'lightning-rainy', + 'datetime': '2019-08-12T20:00:00-04:00', + 
'detailed_description': 'A detailed forecast.', + 'dew_point': -15.6, + 'humidity': 75, + 'precipitation_probability': 89, + 'temperature': -12.2, + 'wind_bearing': 180, + 'wind_speed': 16.09, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecasts].5 + dict({ + 'weather.abc_daynight': dict({ + 'forecast': list([ + ]), + }), + }) +# --- # name: test_forecast_subscription[hourly-weather.abc_daynight] list([ dict({ diff --git a/tests/components/nws/test_weather.py b/tests/components/nws/test_weather.py index 54069eec02c..c7478be7c07 100644 --- a/tests/components/nws/test_weather.py +++ b/tests/components/nws/test_weather.py @@ -13,7 +13,8 @@ from homeassistant.components.weather import ( ATTR_CONDITION_SUNNY, ATTR_FORECAST, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -400,12 +401,20 @@ async def test_legacy_config_entry(hass: HomeAssistant, no_sensor) -> None: assert len(er.async_entries_for_config_entry(registry, entry.entry_id)) == 2 +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, mock_simple_nws, no_sensor, + service: str, ) -> None: """Test multiple forecast.""" instance = mock_simple_nws.return_value @@ -425,7 +434,7 @@ async def test_forecast_service( for forecast_type in ("twice_daily", "hourly"): response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.abc_daynight", "type": forecast_type, @@ -433,7 +442,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot # Calling the services should use cached data @@ -453,7 +461,7 @@ async def 
test_forecast_service( for forecast_type in ("twice_daily", "hourly"): response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.abc_daynight", "type": forecast_type, @@ -461,7 +469,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot # Calling the services should update the hourly forecast @@ -477,7 +484,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.abc_daynight", "type": "hourly", @@ -485,7 +492,6 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot # after additional 35 minutes data caching expires, data is no longer shown @@ -495,7 +501,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.abc_daynight", "type": "hourly", @@ -503,7 +509,7 @@ async def test_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] == [] + assert response == snapshot @pytest.mark.parametrize( diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index c888381230c..47568a7d760 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -101,7 +101,8 @@ async def mock_supervisor_fixture(hass, aioclient_mock): "homeassistant.components.hassio.HassIO.get_ingress_panels", return_value={"panels": {}}, ), patch.dict( - os.environ, {"SUPERVISOR_TOKEN": "123456"} + os.environ, + {"SUPERVISOR_TOKEN": "123456"}, ): yield diff --git a/tests/components/openai_conversation/conftest.py b/tests/components/openai_conversation/conftest.py index 9f00290600e..40f2eb33f08 100644 --- a/tests/components/openai_conversation/conftest.py +++ 
b/tests/components/openai_conversation/conftest.py @@ -25,7 +25,7 @@ def mock_config_entry(hass): async def mock_init_component(hass, mock_config_entry): """Initialize integration.""" with patch( - "openai.Engine.list", + "openai.Model.list", ): assert await async_setup_component(hass, "openai_conversation", {}) await hass.async_block_till_done() diff --git a/tests/components/openai_conversation/test_config_flow.py b/tests/components/openai_conversation/test_config_flow.py index 471be8035b6..43dfc26ca82 100644 --- a/tests/components/openai_conversation/test_config_flow.py +++ b/tests/components/openai_conversation/test_config_flow.py @@ -32,7 +32,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result["errors"] is None with patch( - "homeassistant.components.openai_conversation.config_flow.openai.Engine.list", + "homeassistant.components.openai_conversation.config_flow.openai.Model.list", ), patch( "homeassistant.components.openai_conversation.async_setup_entry", return_value=True, @@ -88,7 +88,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non ) with patch( - "homeassistant.components.openai_conversation.config_flow.openai.Engine.list", + "homeassistant.components.openai_conversation.config_flow.openai.Model.list", side_effect=side_effect, ): result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index 1b145d9d545..61fe33e5469 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -140,7 +140,7 @@ async def test_template_error( }, ) with patch( - "openai.Engine.list", + "openai.Model.list", ), patch("openai.ChatCompletion.acreate"): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/opentherm_gw/test_config_flow.py 
b/tests/components/opentherm_gw/test_config_flow.py index 0f2c15a5e4a..ef1ac166f1e 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -210,9 +210,11 @@ async def test_options_migration(hass: HomeAssistant) -> None: "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.connect_and_subscribe", return_value=True, ), patch( - "homeassistant.components.opentherm_gw.async_setup", return_value=True + "homeassistant.components.opentherm_gw.async_setup", + return_value=True, ), patch( - "pyotgw.status.StatusManager._process_updates", return_value=None + "pyotgw.status.StatusManager._process_updates", + return_value=None, ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index 171a607d200..941c80a52da 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -73,7 +73,7 @@ async def test_factory_reset_error_1( ) as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, pytest.raises( - HomeAssistantError + HomeAssistantError, ): await data.factory_reset() @@ -94,7 +94,7 @@ async def test_factory_reset_error_2( "python_otbr_api.OTBR.delete_active_dataset", side_effect=python_otbr_api.OTBRError, ) as delete_active_dataset_mock, pytest.raises( - HomeAssistantError + HomeAssistantError, ): await data.factory_reset() diff --git a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index cba046a2a9d..8288e7e9f70 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -189,7 +189,7 @@ async def test_create_network_fails_3( ), patch( "python_otbr_api.OTBR.create_active_dataset", ), patch( - "python_otbr_api.OTBR.factory_reset" + "python_otbr_api.OTBR.factory_reset", ): await 
websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() @@ -211,7 +211,7 @@ async def test_create_network_fails_4( "python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=python_otbr_api.OTBRError, ), patch( - "python_otbr_api.OTBR.factory_reset" + "python_otbr_api.OTBR.factory_reset", ): await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) msg = await websocket_client.receive_json() diff --git a/tests/components/ourgroceries/__init__.py b/tests/components/ourgroceries/__init__.py new file mode 100644 index 00000000000..67fcb439908 --- /dev/null +++ b/tests/components/ourgroceries/__init__.py @@ -0,0 +1,6 @@ +"""Tests for the OurGroceries integration.""" + + +def items_to_shopping_list(items: list) -> dict[dict[list]]: + """Convert a list of items into a shopping list.""" + return {"list": {"items": items}} diff --git a/tests/components/ourgroceries/conftest.py b/tests/components/ourgroceries/conftest.py new file mode 100644 index 00000000000..7f113da2633 --- /dev/null +++ b/tests/components/ourgroceries/conftest.py @@ -0,0 +1,68 @@ +"""Common fixtures for the OurGroceries tests.""" +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.ourgroceries import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . 
import items_to_shopping_list + +from tests.common import MockConfigEntry + +USERNAME = "test-username" +PASSWORD = "test-password" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ourgroceries.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="ourgroceries_config_entry") +def mock_ourgroceries_config_entry() -> MockConfigEntry: + """Mock ourgroceries configuration.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD} + ) + + +@pytest.fixture(name="items") +def mock_items() -> dict: + """Mock a collection of shopping list items.""" + return [] + + +@pytest.fixture(name="ourgroceries") +def mock_ourgroceries(items: list[dict]) -> AsyncMock: + """Mock the OurGroceries api.""" + og = AsyncMock() + og.login.return_value = True + og.get_my_lists.return_value = { + "shoppingLists": [{"id": "test_list", "name": "Test List"}] + } + og.get_list_items.return_value = items_to_shopping_list(items) + return og + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, + ourgroceries: AsyncMock, + ourgroceries_config_entry: MockConfigEntry, +) -> None: + """Mock setup of the ourgroceries integration.""" + ourgroceries_config_entry.add_to_hass(hass) + with patch( + "homeassistant.components.ourgroceries.OurGroceries", return_value=ourgroceries + ): + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + yield diff --git a/tests/components/ourgroceries/test_config_flow.py b/tests/components/ourgroceries/test_config_flow.py new file mode 100644 index 00000000000..f9d274125c1 --- /dev/null +++ b/tests/components/ourgroceries/test_config_flow.py @@ -0,0 +1,96 @@ +"""Test the OurGroceries config flow.""" +from unittest.mock import AsyncMock, patch + +import pytest + +from 
homeassistant import config_entries +from homeassistant.components.ourgroceries.config_flow import ( + AsyncIOTimeoutError, + ClientError, + InvalidLoginException, +) +from homeassistant.components.ourgroceries.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.ourgroceries.config_flow.OurGroceries.login", + return_value=True, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["title"] == "test-username" + assert result2["data"] == { + "username": "test-username", + "password": "test-password", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (InvalidLoginException, "invalid_auth"), + (ClientError, "cannot_connect"), + (AsyncIOTimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_error( + hass: HomeAssistant, exception: Exception, error: str, mock_setup_entry: AsyncMock +) -> None: + """Test we handle form errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.ourgroceries.config_flow.OurGroceries.login", + side_effect=exception, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "test-password", + }, + ) + + assert result2["type"] == 
FlowResultType.FORM + assert result2["errors"] == {"base": error} + with patch( + "homeassistant.components.ourgroceries.config_flow.OurGroceries.login", + return_value=True, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "username": "test-username", + "password": "test-password", + }, + ) + + assert result3["type"] == FlowResultType.CREATE_ENTRY + assert result3["title"] == "test-username" + assert result3["data"] == { + "username": "test-username", + "password": "test-password", + } + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ourgroceries/test_init.py b/tests/components/ourgroceries/test_init.py new file mode 100644 index 00000000000..ef96c5e811c --- /dev/null +++ b/tests/components/ourgroceries/test_init.py @@ -0,0 +1,55 @@ +"""Unit tests for the OurGroceries integration.""" +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.ourgroceries import ( + AsyncIOTimeoutError, + ClientError, + InvalidLoginException, +) +from homeassistant.components.ourgroceries.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload( + hass: HomeAssistant, + setup_integration: None, + ourgroceries_config_entry: MockConfigEntry | None, +) -> None: + """Test loading and unloading of the config entry.""" + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + assert ourgroceries_config_entry.state == ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(ourgroceries_config_entry.entry_id) + assert ourgroceries_config_entry.state == ConfigEntryState.NOT_LOADED + + +@pytest.fixture +def login_with_error(exception, ourgroceries: AsyncMock): + """Fixture to simulate error on login.""" + ourgroceries.login.side_effect = (exception,) + + +@pytest.mark.parametrize( + ("exception", "status"), 
+ [ + (InvalidLoginException, ConfigEntryState.SETUP_ERROR), + (ClientError, ConfigEntryState.SETUP_RETRY), + (AsyncIOTimeoutError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_init_failure( + hass: HomeAssistant, + login_with_error, + setup_integration: None, + status: ConfigEntryState, + ourgroceries_config_entry: MockConfigEntry | None, +) -> None: + """Test an initialization error on integration load.""" + assert ourgroceries_config_entry.state == status diff --git a/tests/components/ourgroceries/test_todo.py b/tests/components/ourgroceries/test_todo.py new file mode 100644 index 00000000000..65bbff0e601 --- /dev/null +++ b/tests/components/ourgroceries/test_todo.py @@ -0,0 +1,243 @@ +"""Unit tests for the OurGroceries todo platform.""" +from asyncio import TimeoutError as AsyncIOTimeoutError +from unittest.mock import AsyncMock + +from aiohttp import ClientError +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.ourgroceries.coordinator import SCAN_INTERVAL +from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import items_to_shopping_list + +from tests.common import async_fire_time_changed + + +@pytest.mark.parametrize( + ("items", "expected_state"), + [ + ([], "0"), + ([{"id": "12345", "name": "Soda"}], "1"), + ([{"id": "12345", "name": "Soda", "crossedOffAt": 1699107501}], "0"), + ( + [ + {"id": "12345", "name": "Soda"}, + {"id": "54321", "name": "Milk"}, + ], + "2", + ), + ], +) +async def test_todo_item_state( + hass: HomeAssistant, + setup_integration: None, + expected_state: str, +) -> None: + """Test for a shopping list entity state.""" + + state = hass.states.get("todo.test_list") + assert state + assert state.state == expected_state + + +async def test_add_todo_list_item( + hass: HomeAssistant, + setup_integration: None, + ourgroceries: AsyncMock, +) -> None: + """Test for adding an item.""" + + state = hass.states.get("todo.test_list") + assert state + assert state.state == "0" + + ourgroceries.add_item_to_list = AsyncMock() + # Fake API response when state is refreshed after create + ourgroceries.get_list_items.return_value = items_to_shopping_list( + [{"id": "12345", "name": "Soda"}] + ) + + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + {"item": "Soda"}, + target={"entity_id": "todo.test_list"}, + blocking=True, + ) + + args = ourgroceries.add_item_to_list.call_args + assert args + assert args.args == ("test_list", "Soda") + assert args.kwargs.get("auto_category") is True + + # Verify state is refreshed + state = hass.states.get("todo.test_list") + assert state + assert state.state == "1" + + +@pytest.mark.parametrize(("items"), [[{"id": "12345", "name": "Soda"}]]) +async def test_update_todo_item_status( + hass: HomeAssistant, + setup_integration: None, + ourgroceries: AsyncMock, +) -> None: + """Test for updating the completion status of an item.""" + + state = hass.states.get("todo.test_list") + assert state + assert state.state == "1" + + ourgroceries.toggle_item_crossed_off = AsyncMock() + + # Fake API response when state is refreshed 
after crossing off + ourgroceries.get_list_items.return_value = items_to_shopping_list( + [{"id": "12345", "name": "Soda", "crossedOffAt": 1699107501}] + ) + + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": "12345", "status": "completed"}, + target={"entity_id": "todo.test_list"}, + blocking=True, + ) + assert ourgroceries.toggle_item_crossed_off.called + args = ourgroceries.toggle_item_crossed_off.call_args + assert args + assert args.args == ("test_list", "12345") + assert args.kwargs.get("cross_off") is True + + # Verify state is refreshed + state = hass.states.get("todo.test_list") + assert state + assert state.state == "0" + + # Fake API response when state is refreshed after reopen + ourgroceries.get_list_items.return_value = items_to_shopping_list( + [{"id": "12345", "name": "Soda"}] + ) + + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": "12345", "status": "needs_action"}, + target={"entity_id": "todo.test_list"}, + blocking=True, + ) + assert ourgroceries.toggle_item_crossed_off.called + args = ourgroceries.toggle_item_crossed_off.call_args + assert args + assert args.args == ("test_list", "12345") + assert args.kwargs.get("cross_off") is False + + # Verify state is refreshed + state = hass.states.get("todo.test_list") + assert state + assert state.state == "1" + + +@pytest.mark.parametrize( + ("items"), [[{"id": "12345", "name": "Soda", "categoryId": "test_category"}]] +) +async def test_update_todo_item_summary( + hass: HomeAssistant, + setup_integration: None, + ourgroceries: AsyncMock, +) -> None: + """Test for updating an item summary.""" + + state = hass.states.get("todo.test_list") + assert state + assert state.state == "1" + + ourgroceries.change_item_on_list = AsyncMock() + + # Fake API response when state is refreshed update + ourgroceries.get_list_items.return_value = items_to_shopping_list( + [{"id": "12345", "name": "Milk"}] + ) + + await hass.services.async_call( + TODO_DOMAIN, + 
"update_item", + {"item": "12345", "rename": "Milk"}, + target={"entity_id": "todo.test_list"}, + blocking=True, + ) + assert ourgroceries.change_item_on_list + args = ourgroceries.change_item_on_list.call_args + assert args.args == ("test_list", "12345", "test_category", "Milk") + + +@pytest.mark.parametrize( + ("items"), + [ + [ + {"id": "12345", "name": "Soda"}, + {"id": "54321", "name": "Milk"}, + ] + ], +) +async def test_remove_todo_item( + hass: HomeAssistant, + setup_integration: None, + ourgroceries: AsyncMock, +) -> None: + """Test for removing an item.""" + + state = hass.states.get("todo.test_list") + assert state + assert state.state == "2" + + ourgroceries.remove_item_from_list = AsyncMock() + # Fake API response when state is refreshed after remove + ourgroceries.get_list_items.return_value = items_to_shopping_list([]) + + await hass.services.async_call( + TODO_DOMAIN, + "remove_item", + {"item": ["12345", "54321"]}, + target={"entity_id": "todo.test_list"}, + blocking=True, + ) + assert ourgroceries.remove_item_from_list.call_count == 2 + args = ourgroceries.remove_item_from_list.call_args_list + assert args[0].args == ("test_list", "12345") + assert args[1].args == ("test_list", "54321") + + await async_update_entity(hass, "todo.test_list") + state = hass.states.get("todo.test_list") + assert state + assert state.state == "0" + + +@pytest.mark.parametrize( + ("exception"), + [ + (ClientError), + (AsyncIOTimeoutError), + ], +) +async def test_coordinator_error( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + setup_integration: None, + ourgroceries: AsyncMock, + exception: Exception, +) -> None: + """Test error on coordinator update.""" + state = hass.states.get("todo.test_list") + assert state.state == "0" + + ourgroceries.get_list_items.side_effect = exception + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("todo.test_list") + assert state.state == 
STATE_UNAVAILABLE diff --git a/tests/components/overkiz/conftest.py b/tests/components/overkiz/conftest.py index 990b88d84ed..da6d3a60839 100644 --- a/tests/components/overkiz/conftest.py +++ b/tests/components/overkiz/conftest.py @@ -12,8 +12,8 @@ from tests.components.overkiz import load_setup_fixture from tests.components.overkiz.test_config_flow import ( TEST_EMAIL, TEST_GATEWAY_ID, - TEST_HUB, TEST_PASSWORD, + TEST_SERVER, ) MOCK_SETUP_RESPONSE = Mock(devices=[], gateways=[]) @@ -26,7 +26,7 @@ def mock_config_entry() -> MockConfigEntry: title="Somfy TaHoma Switch", domain=DOMAIN, unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, ) diff --git a/tests/components/overkiz/snapshots/test_diagnostics.ambr b/tests/components/overkiz/snapshots/test_diagnostics.ambr index 06a456f88af..a4ba28ec935 100644 --- a/tests/components/overkiz/snapshots/test_diagnostics.ambr +++ b/tests/components/overkiz/snapshots/test_diagnostics.ambr @@ -1,6 +1,7 @@ # serializer version: 1 # name: test_device_diagnostics dict({ + 'api_type': 'cloud', 'device': dict({ 'controllable_name': 'rts:RollerShutterRTSComponent', 'device_url': 'rts://****-****-6867/16756006', @@ -969,6 +970,7 @@ # --- # name: test_diagnostics dict({ + 'api_type': 'cloud', 'execution_history': list([ ]), 'server': 'somfy_europe', diff --git a/tests/components/overkiz/test_config_flow.py b/tests/components/overkiz/test_config_flow.py index a9d950a3a66..146d54feb9c 100644 --- a/tests/components/overkiz/test_config_flow.py +++ b/tests/components/overkiz/test_config_flow.py @@ -1,13 +1,14 @@ -"""Tests for Overkiz (by Somfy) config flow.""" +"""Tests for Overkiz config flow.""" from __future__ import annotations from ipaddress import ip_address from unittest.mock import AsyncMock, Mock, patch -from aiohttp import ClientError +from aiohttp import ClientConnectorCertificateError, ClientError 
from pyoverkiz.exceptions import ( BadCredentialsException, MaintenanceException, + NotSuchTokenException, TooManyAttemptsBannedException, TooManyRequestsException, UnknownUserException, @@ -28,14 +29,18 @@ TEST_EMAIL = "test@testdomain.com" TEST_EMAIL2 = "test@testdomain.nl" TEST_PASSWORD = "test-password" TEST_PASSWORD2 = "test-password2" -TEST_HUB = "somfy_europe" -TEST_HUB2 = "hi_kumo_europe" -TEST_HUB_COZYTOUCH = "atlantic_cozytouch" +TEST_SERVER = "somfy_europe" +TEST_SERVER2 = "hi_kumo_europe" +TEST_SERVER_COZYTOUCH = "atlantic_cozytouch" TEST_GATEWAY_ID = "1234-5678-9123" TEST_GATEWAY_ID2 = "4321-5678-9123" +TEST_GATEWAY_ID3 = "SOMFY_PROTECT-v0NT53occUBPyuJRzx59kalW1hFfzimN" + +TEST_HOST = "gateway-1234-5678-9123.local:8443" +TEST_HOST2 = "192.168.11.104:8443" MOCK_GATEWAY_RESPONSE = [Mock(id=TEST_GATEWAY_ID)] -MOCK_GATEWAY2_RESPONSE = [Mock(id=TEST_GATEWAY_ID2)] +MOCK_GATEWAY2_RESPONSE = [Mock(id=TEST_GATEWAY_ID3), Mock(id=TEST_GATEWAY_ID2)] FAKE_ZERO_CONF_INFO = ZeroconfServiceInfo( ip_address=ip_address("192.168.0.51"), @@ -51,31 +56,133 @@ FAKE_ZERO_CONF_INFO = ZeroconfServiceInfo( }, ) +FAKE_ZERO_CONF_INFO_LOCAL = ZeroconfServiceInfo( + ip_address=ip_address("192.168.0.51"), + ip_addresses=[ip_address("192.168.0.51")], + port=8443, + hostname=f"gateway-{TEST_GATEWAY_ID}.local.", + type="_kizboxdev._tcp.local.", + name=f"gateway-{TEST_GATEWAY_ID}._kizboxdev._tcp.local.", + properties={ + "api_version": "1", + "gateway_pin": TEST_GATEWAY_ID, + "fw_version": "2021.5.4-29", + }, +) -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + +async def test_form_cloud(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" - assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + 
assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "cloud" with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( - "pyoverkiz.client.OverkizClient.get_gateways", return_value=None + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY_RESPONSE, ): - result2 = await hass.config_entries.flow.async_configure( + await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, ) - assert result2["type"] == "create_entry" - assert result2["title"] == TEST_EMAIL - assert result2["data"] == { - "username": TEST_EMAIL, - "password": TEST_PASSWORD, - "hub": TEST_HUB, - } + await hass.async_block_till_done() + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_only_cloud_supported( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER2}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "cloud" + + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY_RESPONSE, + ): + await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, + ) + + await hass.async_block_till_done() + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_local_happy_flow( + hass: HomeAssistant, mock_setup_entry: 
AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=True), + generate_local_token=AsyncMock(return_value="1234123412341234"), + activate_local_token=AsyncMock(return_value=True), + ): + await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "host": "gateway-1234-5678-1234.local:8443", + }, + ) await hass.async_block_till_done() @@ -95,23 +202,149 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: (Exception, "unknown"), ], ) -async def test_form_invalid_auth( +async def test_form_invalid_auth_cloud( hass: HomeAssistant, side_effect: Exception, error: str ) -> None: - """Test we handle invalid auth.""" + """Test we handle invalid auth (cloud).""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + + assert result3["type"] == "form" + assert 
result3["step_id"] == "cloud" + with patch("pyoverkiz.client.OverkizClient.login", side_effect=side_effect): - result2 = await hass.config_entries.flow.async_configure( + result4 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, ) - assert result["step_id"] == config_entries.SOURCE_USER - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result2["errors"] == {"base": error} + await hass.async_block_till_done() + + assert result4["type"] == data_entry_flow.FlowResultType.FORM + assert result4["errors"] == {"base": error} + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (BadCredentialsException, "invalid_auth"), + (TooManyRequestsException, "too_many_requests"), + ( + ClientConnectorCertificateError(Mock(host=TEST_HOST), Exception), + "certificate_verify_failed", + ), + (TimeoutError, "cannot_connect"), + (ClientError, "cannot_connect"), + (MaintenanceException, "server_in_maintenance"), + (TooManyAttemptsBannedException, "too_many_attempts"), + (UnknownUserException, "unsupported_hardware"), + (NotSuchTokenException, "no_such_token"), + (Exception, "unknown"), + ], +) +async def test_form_invalid_auth_local( + hass: HomeAssistant, side_effect: Exception, error: str +) -> None: + """Test we handle invalid auth (local).""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "local" + + with 
patch("pyoverkiz.client.OverkizClient.login", side_effect=side_effect): + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": TEST_HOST, + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "verify_ssl": True, + }, + ) + + await hass.async_block_till_done() + + assert result4["type"] == data_entry_flow.FlowResultType.FORM + assert result4["errors"] == {"base": error} + + +async def test_form_local_developer_mode_disabled( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=None), + ): + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "host": "gateway-1234-5678-1234.local:8443", + "verify_ssl": True, + }, + ) + + assert result4["type"] == data_entry_flow.FlowResultType.FORM + assert result4["errors"] == {"base": "developer_mode_disabled"} @pytest.mark.parametrize( @@ -123,79 +356,398 @@ async def test_form_invalid_auth( async def test_form_invalid_cozytouch_auth( hass: HomeAssistant, side_effect: Exception, error: str ) -> None: - """Test we handle invalid auth from CozyTouch.""" + """Test we handle invalid auth (cloud).""" result = await 
hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER_COZYTOUCH}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "cloud" + with patch("pyoverkiz.client.OverkizClient.login", side_effect=side_effect): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "username": TEST_EMAIL, - "password": TEST_PASSWORD, - "hub": TEST_HUB_COZYTOUCH, - }, - ) - - assert result["step_id"] == config_entries.SOURCE_USER - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result2["errors"] == {"base": error} - - -async def test_abort_on_duplicate_entry(hass: HomeAssistant) -> None: - """Test config flow aborts Config Flow on duplicate entries.""" - MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, - ).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( - "pyoverkiz.client.OverkizClient.get_gateways", - return_value=MOCK_GATEWAY_RESPONSE, - ): - result2 = await hass.config_entries.flow.async_configure( + result3 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": TEST_EMAIL, "password": TEST_PASSWORD}, ) - assert result2["type"] == data_entry_flow.FlowResultType.ABORT - assert result2["reason"] == "already_configured" + await hass.async_block_till_done() + + assert result3["type"] == data_entry_flow.FlowResultType.FORM + assert result3["errors"] == {"base": error} + assert result3["step_id"] == "cloud" -async def test_allow_multiple_unique_entries(hass: HomeAssistant) -> None: - """Test config flow allows Config Flow unique entries.""" +async def 
test_cloud_abort_on_duplicate_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + MockConfigEntry( domain=DOMAIN, - unique_id=TEST_GATEWAY_ID2, - data={"username": "test2@testdomain.com", "password": TEST_PASSWORD}, + unique_id=TEST_GATEWAY_ID, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, ).add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "cloud" + + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY_RESPONSE, + ): + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, + ) + + assert result4["type"] == data_entry_flow.FlowResultType.ABORT + assert result4["reason"] == "already_configured" + + +async def test_local_abort_on_duplicate_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID, + data={ + "host": TEST_HOST, + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER, + }, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == 
"form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=True), + generate_local_token=AsyncMock(return_value="1234123412341234"), + activate_local_token=AsyncMock(return_value=True), + ): + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": TEST_HOST, + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "verify_ssl": True, + }, + ) + + assert result4["type"] == data_entry_flow.FlowResultType.ABORT + assert result4["reason"] == "already_configured" + + +async def test_cloud_allow_multiple_unique_entries( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + MockConfigEntry( + version=1, + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID2, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] == "form" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "cloud" + + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY_RESPONSE, + ): + result4 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, + ) + + assert result4["type"] == "create_entry" + assert result4["title"] == TEST_EMAIL + assert result4["data"] == { + "api_type": "cloud", + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER, + } + + +async def test_cloud_reauth_success(hass: HomeAssistant) -> None: + """Test reauthentication flow.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID, + version=2, + data={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER2, + "api_type": "cloud", + }, + ) + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "cloud" + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( "pyoverkiz.client.OverkizClient.get_gateways", return_value=MOCK_GATEWAY_RESPONSE, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + user_input={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD2, + }, ) - assert result2["type"] == "create_entry" - assert result2["title"] == TEST_EMAIL - assert result2["data"] == { - "username": TEST_EMAIL, - "password": TEST_PASSWORD, - "hub": TEST_HUB, - } + assert result2["type"] == data_entry_flow.FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert mock_entry.data["username"] == TEST_EMAIL + assert mock_entry.data["password"] == TEST_PASSWORD2 + + +async def test_cloud_reauth_wrong_account(hass: HomeAssistant) -> None: + """Test reauthentication flow.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID, + version=2, + data={ + 
"username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER2, + "api_type": "cloud", + }, + ) + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "cloud" + + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY2_RESPONSE, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD2, + }, + ) + + assert result2["type"] == data_entry_flow.FlowResultType.ABORT + assert result2["reason"] == "reauth_wrong_account" + + +async def test_local_reauth_success(hass: HomeAssistant) -> None: + """Test reauthentication flow.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID, + version=2, + data={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER, + "host": TEST_HOST, + "api_type": "local", + }, + ) + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "local_or_cloud" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result2["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=True), + 
generate_local_token=AsyncMock(return_value="1234123412341234"), + activate_local_token=AsyncMock(return_value=True), + ): + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD2, + }, + ) + + assert result3["type"] == data_entry_flow.FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" + assert mock_entry.data["username"] == TEST_EMAIL + assert mock_entry.data["password"] == TEST_PASSWORD2 + + +async def test_local_reauth_wrong_account(hass: HomeAssistant) -> None: + """Test reauthentication flow.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_GATEWAY_ID2, + version=2, + data={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER, + "host": TEST_HOST, + "api_type": "local", + }, + ) + mock_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "unique_id": mock_entry.unique_id, + "entry_id": mock_entry.entry_id, + }, + data=mock_entry.data, + ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "local_or_cloud" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "local"}, + ) + + assert result2["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=True), + generate_local_token=AsyncMock(return_value="1234123412341234"), + activate_local_token=AsyncMock(return_value=True), + ): + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + "username": TEST_EMAIL, + "password": TEST_PASSWORD2, + }, + ) + + assert result3["type"] == data_entry_flow.FlowResultType.ABORT + assert result3["reason"] == "reauth_wrong_account" async def 
test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: @@ -213,20 +765,37 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == config_entries.SOURCE_USER + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( "pyoverkiz.client.OverkizClient.get_gateways", return_value=None ): - result2 = await hass.config_entries.flow.async_configure( + result4 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + { + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + }, ) - assert result2["type"] == "create_entry" - assert result2["title"] == TEST_EMAIL - assert result2["data"] == { + assert result4["type"] == "create_entry" + assert result4["title"] == TEST_EMAIL + assert result4["data"] == { "username": TEST_EMAIL, "password": TEST_PASSWORD, - "hub": TEST_HUB, + "hub": TEST_SERVER, + "api_type": "cloud", } assert len(mock_setup_entry.mock_calls) == 1 @@ -237,7 +806,7 @@ async def test_dhcp_flow_already_configured(hass: HomeAssistant) -> None: config_entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, ) config_entry.add_to_hass(hass) @@ -266,20 +835,95 @@ async def test_zeroconf_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) - assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == config_entries.SOURCE_USER + result2 = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"api_type": "cloud"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "cloud" + with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( - "pyoverkiz.client.OverkizClient.get_gateways", return_value=None + "pyoverkiz.client.OverkizClient.get_gateways", + return_value=MOCK_GATEWAY_RESPONSE, ): - result2 = await hass.config_entries.flow.async_configure( + result4 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + {"username": TEST_EMAIL, "password": TEST_PASSWORD}, ) - assert result2["type"] == "create_entry" - assert result2["title"] == TEST_EMAIL - assert result2["data"] == { + assert result4["type"] == "create_entry" + assert result4["title"] == TEST_EMAIL + assert result4["data"] == { "username": TEST_EMAIL, "password": TEST_PASSWORD, - "hub": TEST_HUB, + "hub": TEST_SERVER, + "api_type": "cloud", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_local_zeroconf_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test that zeroconf discovery for new local bridge works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=FAKE_ZERO_CONF_INFO_LOCAL, + context={"source": config_entries.SOURCE_ZEROCONF}, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == config_entries.SOURCE_USER + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"hub": TEST_SERVER}, + ) + + assert result2["type"] == "form" + assert result2["step_id"] == "local_or_cloud" + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
{"api_type": "local"}, + ) + + assert result3["type"] == "form" + assert result3["step_id"] == "local" + + with patch.multiple( + "pyoverkiz.client.OverkizClient", + login=AsyncMock(return_value=True), + get_gateways=AsyncMock(return_value=MOCK_GATEWAY_RESPONSE), + get_setup_option=AsyncMock(return_value=True), + generate_local_token=AsyncMock(return_value="1234123412341234"), + activate_local_token=AsyncMock(return_value=True), + ): + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": TEST_EMAIL, "password": TEST_PASSWORD, "verify_ssl": False}, + ) + + assert result4["type"] == "create_entry" + assert result4["title"] == "gateway-1234-5678-9123.local:8443" + assert result4["data"] == { + "username": TEST_EMAIL, + "password": TEST_PASSWORD, + "hub": TEST_SERVER, + "host": "gateway-1234-5678-9123.local:8443", + "api_type": "local", + "token": "1234123412341234", + "verify_ssl": False, } assert len(mock_setup_entry.mock_calls) == 1 @@ -290,7 +934,7 @@ async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: config_entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, ) config_entry.add_to_hass(hass) @@ -302,85 +946,3 @@ async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_reauth_success(hass: HomeAssistant) -> None: - """Test reauthentication flow.""" - - mock_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB2}, - ) - mock_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": 
mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "user" - - with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( - "pyoverkiz.client.OverkizClient.get_gateways", - return_value=MOCK_GATEWAY_RESPONSE, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - "username": TEST_EMAIL, - "password": TEST_PASSWORD2, - "hub": TEST_HUB2, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - assert mock_entry.data["username"] == TEST_EMAIL - assert mock_entry.data["password"] == TEST_PASSWORD2 - - -async def test_reauth_wrong_account(hass: HomeAssistant) -> None: - """Test reauthentication flow.""" - - mock_entry = MockConfigEntry( - domain=DOMAIN, - unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB2}, - ) - mock_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "user" - - with patch("pyoverkiz.client.OverkizClient.login", return_value=True), patch( - "pyoverkiz.client.OverkizClient.get_gateways", - return_value=MOCK_GATEWAY2_RESPONSE, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - "username": TEST_EMAIL, - "password": TEST_PASSWORD2, - "hub": TEST_HUB2, - }, - ) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "reauth_wrong_account" diff --git a/tests/components/overkiz/test_init.py b/tests/components/overkiz/test_init.py index 774f3c9a79a..ddecee7c167 100644 --- 
a/tests/components/overkiz/test_init.py +++ b/tests/components/overkiz/test_init.py @@ -4,7 +4,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from .test_config_flow import TEST_EMAIL, TEST_GATEWAY_ID, TEST_HUB, TEST_PASSWORD +from .test_config_flow import TEST_EMAIL, TEST_GATEWAY_ID, TEST_PASSWORD, TEST_SERVER from tests.common import MockConfigEntry, mock_registry @@ -23,7 +23,7 @@ async def test_unique_id_migration(hass: HomeAssistant) -> None: mock_entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_GATEWAY_ID, - data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_HUB}, + data={"username": TEST_EMAIL, "password": TEST_PASSWORD, "hub": TEST_SERVER}, ) mock_entry.add_to_hass(hass) diff --git a/tests/components/p1_monitor/test_diagnostics.py b/tests/components/p1_monitor/test_diagnostics.py index 47f43dd3401..55d4ccc5e67 100644 --- a/tests/components/p1_monitor/test_diagnostics.py +++ b/tests/components/p1_monitor/test_diagnostics.py @@ -35,12 +35,12 @@ async def test_diagnostics( "energy_production_low": 1432.279, }, "phases": { - "voltage_phase_l1": "233.6", - "voltage_phase_l2": "0.0", - "voltage_phase_l3": "233.0", - "current_phase_l1": "1.6", - "current_phase_l2": "4.44", - "current_phase_l3": "3.51", + "voltage_phase_l1": 233.6, + "voltage_phase_l2": 0.0, + "voltage_phase_l3": 233.0, + "current_phase_l1": 1.6, + "current_phase_l2": 4.44, + "current_phase_l3": 3.51, "power_consumed_phase_l1": 315, "power_consumed_phase_l2": 0, "power_consumed_phase_l3": 624, @@ -49,11 +49,11 @@ async def test_diagnostics( "power_produced_phase_l3": 0, }, "settings": { - "gas_consumption_price": "0.64", - "energy_consumption_price_high": "0.20522", - "energy_consumption_price_low": "0.20522", - "energy_production_price_high": "0.20522", - "energy_production_price_low": "0.20522", + "gas_consumption_price": 0.64, + "energy_consumption_price_high": 
0.20522, + "energy_consumption_price_low": 0.20522, + "energy_production_price_high": 0.20522, + "energy_production_price_low": 0.20522, }, "watermeter": { "consumption_day": 112.0, diff --git a/tests/components/peco/test_config_flow.py b/tests/components/peco/test_config_flow.py index 532450f0099..ca6759baeff 100644 --- a/tests/components/peco/test_config_flow.py +++ b/tests/components/peco/test_config_flow.py @@ -1,6 +1,7 @@ """Test the PECO Outage Counter config flow.""" from unittest.mock import patch +from peco import HttpError, IncompatibleMeterError, UnresponsiveMeterError import pytest from voluptuous.error import MultipleInvalid @@ -17,6 +18,7 @@ async def test_form(hass: HomeAssistant) -> None: ) assert result["type"] == FlowResultType.FORM assert result["errors"] is None + assert result["step_id"] == "user" with patch( "homeassistant.components.peco.async_setup_entry", @@ -35,6 +37,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["data"] == { "county": "PHILADELPHIA", } + assert result2["context"]["unique_id"] == "PHILADELPHIA" async def test_invalid_county(hass: HomeAssistant) -> None: @@ -43,37 +46,160 @@ async def test_invalid_county(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == FlowResultType.FORM - assert result["errors"] is None - - with pytest.raises(MultipleInvalid): - await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "county": "INVALID_COUNTY_THAT_SHOULD_NOT_EXIST", - }, - ) - await hass.async_block_till_done() - - second_result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert second_result["type"] == FlowResultType.FORM - assert second_result["errors"] is None + assert result["step_id"] == "user" with patch( "homeassistant.components.peco.async_setup_entry", return_value=True, - ): - second_result2 = await hass.config_entries.flow.async_configure( - 
second_result["flow_id"], + ), pytest.raises(MultipleInvalid): + await hass.config_entries.flow.async_configure( + result["flow_id"], { - "county": "PHILADELPHIA", + "county": "INVALID_COUNTY_THAT_SHOULDNT_EXIST", }, ) await hass.async_block_till_done() - assert second_result2["type"] == FlowResultType.CREATE_ENTRY - assert second_result2["title"] == "Philadelphia Outage Count" - assert second_result2["data"] == { - "county": "PHILADELPHIA", - } + +async def test_meter_value_error(hass: HomeAssistant) -> None: + """Test if the MeterValueError error works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "county": "PHILADELPHIA", + "phone_number": "INVALID_SMART_METER_THAT_SHOULD_NOT_EXIST", + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + assert result["progress_action"] == "verifying_meter" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"phone_number": "invalid_phone_number"} + + +async def test_incompatible_meter_error(hass: HomeAssistant) -> None: + """Test if the IncompatibleMeter error works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch("peco.PecoOutageApi.meter_check", side_effect=IncompatibleMeterError()): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "county": "PHILADELPHIA", + "phone_number": "1234567890", + }, + ) + await hass.async_block_till_done() + + assert 
result["type"] == FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + assert result["progress_action"] == "verifying_meter" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "incompatible_meter" + + +async def test_unresponsive_meter_error(hass: HomeAssistant) -> None: + """Test if the UnresponsiveMeter error works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch("peco.PecoOutageApi.meter_check", side_effect=UnresponsiveMeterError()): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "county": "PHILADELPHIA", + "phone_number": "1234567890", + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + assert result["progress_action"] == "verifying_meter" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"phone_number": "unresponsive_meter"} + + +async def test_meter_http_error(hass: HomeAssistant) -> None: + """Test if the InvalidMeter error works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch("peco.PecoOutageApi.meter_check", side_effect=HttpError): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "county": "PHILADELPHIA", + "phone_number": "1234567890", + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + assert 
result["progress_action"] == "verifying_meter" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"phone_number": "http_error"} + + +async def test_smart_meter(hass: HomeAssistant) -> None: + """Test if the Smart Meter step works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "user" + + with patch("peco.PecoOutageApi.meter_check", return_value=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "county": "PHILADELPHIA", + "phone_number": "1234567890", + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + assert result["progress_action"] == "verifying_meter" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "Philadelphia - 1234567890" + assert result["data"]["phone_number"] == "1234567890" + assert result["context"]["unique_id"] == "PHILADELPHIA-1234567890" diff --git a/tests/components/peco/test_init.py b/tests/components/peco/test_init.py index 52a7ddd3b25..2919e508c97 100644 --- a/tests/components/peco/test_init.py +++ b/tests/components/peco/test_init.py @@ -2,7 +2,13 @@ import asyncio from unittest.mock import patch -from peco import AlertResults, BadJSONError, HttpError, OutageResults +from peco import ( + AlertResults, + BadJSONError, + HttpError, + OutageResults, + UnresponsiveMeterError, +) import pytest from homeassistant.components.peco.const import DOMAIN @@ -14,6 +20,7 @@ from tests.common import MockConfigEntry MOCK_ENTRY_DATA = {"county": "TOTAL"} COUNTY_ENTRY_DATA = {"county": "BUCKS"} INVALID_COUNTY_DATA = {"county": 
"INVALID"} +METER_DATA = {"county": "BUCKS", "phone_number": "1234567890"} async def test_unload_entry(hass: HomeAssistant) -> None: @@ -149,3 +156,154 @@ async def test_bad_json(hass: HomeAssistant, sensor: str) -> None: assert hass.states.get(f"sensor.{sensor}") is None assert config_entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_unresponsive_meter_error(hass: HomeAssistant) -> None: + """Test if it raises an error when the meter will not respond.""" + + config_entry = MockConfigEntry(domain=DOMAIN, data=METER_DATA) + config_entry.add_to_hass(hass) + + with patch( + "peco.PecoOutageApi.meter_check", + side_effect=UnresponsiveMeterError(), + ), patch( + "peco.PecoOutageApi.get_outage_count", + return_value=OutageResults( + customers_out=0, + percent_customers_out=0, + outage_count=0, + customers_served=350394, + ), + ), patch( + "peco.PecoOutageApi.get_map_alerts", + return_value=AlertResults( + alert_content="Testing 1234", alert_title="Testing 4321" + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.meter_status") is None + assert config_entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_meter_http_error(hass: HomeAssistant) -> None: + """Test if it raises an error when there is an HTTP error.""" + + config_entry = MockConfigEntry(domain=DOMAIN, data=METER_DATA) + config_entry.add_to_hass(hass) + + with patch( + "peco.PecoOutageApi.meter_check", + side_effect=HttpError(), + ), patch( + "peco.PecoOutageApi.get_outage_count", + return_value=OutageResults( + customers_out=0, + percent_customers_out=0, + outage_count=0, + customers_served=350394, + ), + ), patch( + "peco.PecoOutageApi.get_map_alerts", + return_value=AlertResults( + alert_content="Testing 1234", alert_title="Testing 4321" + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert 
hass.states.get("binary_sensor.meter_status") is None + assert config_entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_meter_bad_json(hass: HomeAssistant) -> None: + """Test if it raises an error when there is bad JSON.""" + + config_entry = MockConfigEntry(domain=DOMAIN, data=METER_DATA) + config_entry.add_to_hass(hass) + + with patch( + "peco.PecoOutageApi.meter_check", + side_effect=BadJSONError(), + ), patch( + "peco.PecoOutageApi.get_outage_count", + return_value=OutageResults( + customers_out=0, + percent_customers_out=0, + outage_count=0, + customers_served=350394, + ), + ), patch( + "peco.PecoOutageApi.get_map_alerts", + return_value=AlertResults( + alert_content="Testing 1234", alert_title="Testing 4321" + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.meter_status") is None + assert config_entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_meter_timeout(hass: HomeAssistant) -> None: + """Test if it raises an error when there is a timeout.""" + + config_entry = MockConfigEntry(domain=DOMAIN, data=METER_DATA) + config_entry.add_to_hass(hass) + + with patch( + "peco.PecoOutageApi.meter_check", + side_effect=asyncio.TimeoutError(), + ), patch( + "peco.PecoOutageApi.get_outage_count", + return_value=OutageResults( + customers_out=0, + percent_customers_out=0, + outage_count=0, + customers_served=350394, + ), + ), patch( + "peco.PecoOutageApi.get_map_alerts", + return_value=AlertResults( + alert_content="Testing 1234", alert_title="Testing 4321" + ), + ): + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.meter_status") is None + assert config_entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_meter_data(hass: HomeAssistant) -> None: + """Test if the meter returns the value successfully.""" + + config_entry = 
MockConfigEntry(domain=DOMAIN, data=METER_DATA) + config_entry.add_to_hass(hass) + + with patch( + "peco.PecoOutageApi.meter_check", + return_value=True, + ), patch( + "peco.PecoOutageApi.get_outage_count", + return_value=OutageResults( + customers_out=0, + percent_customers_out=0, + outage_count=0, + customers_served=350394, + ), + ), patch( + "peco.PecoOutageApi.get_map_alerts", + return_value=AlertResults( + alert_content="Testing 1234", alert_title="Testing 4321" + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.meter_status") is not None + assert hass.states.get("binary_sensor.meter_status").state == "on" + assert config_entry.state == ConfigEntryState.LOADED diff --git a/tests/components/permobil/__init__.py b/tests/components/permobil/__init__.py new file mode 100644 index 00000000000..56e779eef4d --- /dev/null +++ b/tests/components/permobil/__init__.py @@ -0,0 +1 @@ +"""Tests for the MyPermobil integration.""" diff --git a/tests/components/permobil/conftest.py b/tests/components/permobil/conftest.py new file mode 100644 index 00000000000..2dcf9bd5ad2 --- /dev/null +++ b/tests/components/permobil/conftest.py @@ -0,0 +1,27 @@ +"""Common fixtures for the MyPermobil tests.""" +from collections.abc import Generator +from unittest.mock import AsyncMock, Mock, patch + +from mypermobil import MyPermobil +import pytest + +from .const import MOCK_REGION_NAME, MOCK_TOKEN, MOCK_URL + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.permobil.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def my_permobil() -> Mock: + """Mock spec for MyPermobilApi.""" + mock = Mock(spec=MyPermobil) + mock.request_region_names.return_value = {MOCK_REGION_NAME: MOCK_URL} + 
mock.request_application_token.return_value = MOCK_TOKEN + mock.region = "" + return mock diff --git a/tests/components/permobil/const.py b/tests/components/permobil/const.py new file mode 100644 index 00000000000..cb8a0c32f17 --- /dev/null +++ b/tests/components/permobil/const.py @@ -0,0 +1,5 @@ +"""Test constants for Permobil.""" + +MOCK_URL = "https://example.com" +MOCK_REGION_NAME = "region_name" +MOCK_TOKEN = ("a" * 256, "date") diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py new file mode 100644 index 00000000000..ad61ead7bfc --- /dev/null +++ b/tests/components/permobil/test_config_flow.py @@ -0,0 +1,288 @@ +"""Test the MyPermobil config flow.""" +from unittest.mock import Mock, patch + +from mypermobil import MyPermobilAPIException, MyPermobilClientException +import pytest + +from homeassistant import config_entries +from homeassistant.components.permobil import config_flow +from homeassistant.const import CONF_CODE, CONF_EMAIL, CONF_REGION, CONF_TOKEN, CONF_TTL +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_REGION_NAME, MOCK_TOKEN, MOCK_URL + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + +MOCK_CODE = "012345" +MOCK_EMAIL = "valid@email.com" +INVALID_EMAIL = "this is not a valid email" +VALID_DATA = { + CONF_EMAIL: MOCK_EMAIL, + CONF_REGION: MOCK_URL, + CONF_CODE: MOCK_CODE, + CONF_TOKEN: MOCK_TOKEN[0], + CONF_TTL: MOCK_TOKEN[1], +} + + +async def test_sucessful_config_flow(hass: HomeAssistant, my_permobil: Mock) -> None: + """Test the config flow from start to finish with no errors.""" + # init flow + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_EMAIL: 
MOCK_EMAIL}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "region" + assert result["errors"] == {} + + # select region step + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_REGION: MOCK_REGION_NAME}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"] == {} + # request region code + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_CODE: MOCK_CODE}, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == VALID_DATA + + +async def test_config_flow_incorrect_code( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow from start to until email code verification and have the API return error.""" + my_permobil.request_application_token.side_effect = MyPermobilAPIException + # init flow + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_EMAIL: MOCK_EMAIL}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "region" + assert result["errors"] == {} + + # select region step + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_REGION: MOCK_REGION_NAME}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"] == {} + + # request region code + # here the request_application_token raises a MyPermobilAPIException + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_CODE: MOCK_CODE}, + ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"]["base"] == "invalid_code" + + +async def 
test_config_flow_incorrect_region( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow from start to until the request for email code and have the API return error.""" + my_permobil.request_application_code.side_effect = MyPermobilAPIException + # init flow + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_EMAIL: MOCK_EMAIL}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "region" + assert result["errors"] == {} + + # select region step + # here the request_application_code raises a MyPermobilAPIException + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_REGION: MOCK_REGION_NAME}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "region" + assert result["errors"]["base"] == "code_request_error" + + +async def test_config_flow_region_request_error( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow from start to until the request for regions and have the API return error.""" + my_permobil.request_region_names.side_effect = MyPermobilAPIException + # init flow + # here the request_region_names raises a MyPermobilAPIException + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_EMAIL: MOCK_EMAIL}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "region" + assert result["errors"]["base"] == "region_fetch_error" + + +async def test_config_flow_invalid_email( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow from start to until the request for regions 
and have the API return error.""" + my_permobil.set_email.side_effect = MyPermobilClientException() + # init flow + # here the set_email raises a MyPermobilClientException + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data={CONF_EMAIL: INVALID_EMAIL}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == config_entries.SOURCE_USER + assert result["errors"]["base"] == "invalid_email" + + +async def test_config_flow_reauth_success( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow reauth make sure that the values are replaced.""" + # new token and code + reauth_token = ("b" * 256, "reauth_date") + reauth_code = "567890" + my_permobil.request_application_token.return_value = reauth_token + + mock_entry = MockConfigEntry( + domain="permobil", + data=VALID_DATA, + ) + mock_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": mock_entry.entry_id}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"] == {} + + # request request new token + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_CODE: reauth_code}, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_EMAIL: MOCK_EMAIL, + CONF_REGION: MOCK_URL, + CONF_CODE: reauth_code, + CONF_TOKEN: reauth_token[0], + CONF_TTL: reauth_token[1], + } + + +async def test_config_flow_reauth_fail_invalid_code( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow reauth when the email code fails.""" + # new 
code + reauth_invalid_code = "567890" # pretend this code is invalid/incorrect + my_permobil.request_application_token.side_effect = MyPermobilAPIException + mock_entry = MockConfigEntry( + domain="permobil", + data=VALID_DATA, + ) + mock_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": mock_entry.entry_id}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"] == {} + + # request request new token but have the API return error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_CODE: reauth_invalid_code}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "email_code" + assert result["errors"]["base"] == "invalid_code" + + +async def test_config_flow_reauth_fail_code_request( + hass: HomeAssistant, my_permobil: Mock +) -> None: + """Test the config flow reauth.""" + my_permobil.request_application_code.side_effect = MyPermobilAPIException + mock_entry = MockConfigEntry( + domain="permobil", + data=VALID_DATA, + ) + mock_entry.add_to_hass(hass) + # test the reauth and have request_application_code fail leading to an abort + my_permobil.request_application_code.side_effect = MyPermobilAPIException + reauth_entry = hass.config_entries.async_entries(config_flow.DOMAIN)[0] + with patch( + "homeassistant.components.permobil.config_flow.MyPermobil", + return_value=my_permobil, + ): + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, + context={"source": "reauth", "entry_id": reauth_entry.entry_id}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/person/test_init.py b/tests/components/person/test_init.py index 
71491ee3caf..4d7781a095f 100644 --- a/tests/components/person/test_init.py +++ b/tests/components/person/test_init.py @@ -1,4 +1,6 @@ """The tests for the person component.""" +from collections.abc import Callable +from http import HTTPStatus from typing import Any from unittest.mock import patch @@ -29,7 +31,8 @@ from homeassistant.setup import async_setup_component from .conftest import DEVICE_TRACKER, DEVICE_TRACKER_2 from tests.common import MockUser, mock_component, mock_restore_cache -from tests.typing import WebSocketGenerator +from tests.test_util import mock_real_ip +from tests.typing import ClientSessionGenerator, WebSocketGenerator async def test_minimal_setup(hass: HomeAssistant) -> None: @@ -847,3 +850,63 @@ async def test_entities_in_person(hass: HomeAssistant) -> None: "device_tracker.paulus_iphone", "device_tracker.paulus_ipad", ] + + +@pytest.mark.parametrize( + ("ip", "status_code", "expected_fn"), + [ + ( + "192.168.0.10", + HTTPStatus.OK, + lambda user: { + user["user_id"]: {"name": user["name"], "picture": user["picture"]} + }, + ), + ( + "::ffff:192.168.0.10", + HTTPStatus.OK, + lambda user: { + user["user_id"]: {"name": user["name"], "picture": user["picture"]} + }, + ), + ( + "1.2.3.4", + HTTPStatus.BAD_REQUEST, + lambda _: {"code": "not_local", "message": "Not local"}, + ), + ( + "2001:db8::1", + HTTPStatus.BAD_REQUEST, + lambda _: {"code": "not_local", "message": "Not local"}, + ), + ], +) +async def test_list_persons( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + hass_admin_user: MockUser, + ip: str, + status_code: HTTPStatus, + expected_fn: Callable[[dict[str, Any]], dict[str, Any]], +) -> None: + """Test listing persons from a not local ip address.""" + + user_id = hass_admin_user.id + admin = {"id": "1234", "name": "Admin", "user_id": user_id, "picture": "/bla"} + config = { + DOMAIN: [ + admin, + {"id": "5678", "name": "Only a person"}, + ] + } + assert await async_setup_component(hass, DOMAIN, config) + + 
await async_setup_component(hass, "api", {}) + mock_real_ip(hass.http.app)(ip) + client = await hass_client_no_auth() + + resp = await client.get("/api/person/list") + + assert resp.status == status_code + result = await resp.json() + assert result == expected_fn(admin) diff --git a/tests/components/philips_js/__init__.py b/tests/components/philips_js/__init__.py index f524a586fc8..60e8b238917 100644 --- a/tests/components/philips_js/__init__.py +++ b/tests/components/philips_js/__init__.py @@ -73,3 +73,129 @@ MOCK_CONFIG_PAIRED = { } MOCK_ENTITY_ID = "media_player.philips_tv" + +MOCK_RECORDINGS_LIST = { + "version": "253.91", + "recordings": [ + { + "RecordingId": 36, + "RecordingType": "RECORDING_ONGOING", + "IsIpEpgRec": False, + "ccid": 2091, + "StartTime": 1676833531, + "Duration": 569, + "MarginStart": 0, + "MarginEnd": 0, + "EventId": 47369, + "EITVersion": 0, + "RetentionInfo": 0, + "EventInfo": "This is a event info which is not rejected by codespell.", + "EventExtendedInfo": "", + "EventGenre": "8", + "RecName": "Terra X", + "SeriesID": "None", + "SeasonNo": 0, + "EpisodeNo": 0, + "EpisodeCount": 72300, + "ProgramNumber": 11110, + "EventRating": 0, + "hasDot": True, + "isFTARecording": False, + "LastPinChangedTime": 0, + "Version": 344, + "HasCicamPin": False, + "HasLicenseFile": False, + "Size": 0, + "ResumeInfo": 0, + "IsPartial": False, + "AutoMarginStart": 0, + "AutoMarginEnd": 0, + "ServerRecordingId": -1, + "ActualStartTime": 1676833531, + "ProgramDuration": 0, + "IsRadio": False, + "EITSource": "EIT_SOURCE_PF", + "RecError": "REC_ERROR_NONE", + }, + { + "RecordingId": 35, + "RecordingType": "RECORDING_NEW", + "IsIpEpgRec": False, + "ccid": 2091, + "StartTime": 1676832212, + "Duration": 22, + "MarginStart": 0, + "MarginEnd": 0, + "EventId": 47369, + "EITVersion": 0, + "RetentionInfo": -1, + "EventInfo": "This is another event info which is not rejected by codespell.", + "EventExtendedInfo": "", + "EventGenre": "8", + "RecName": "Terra X", + 
"SeriesID": "None", + "SeasonNo": 0, + "EpisodeNo": 0, + "EpisodeCount": 70980, + "ProgramNumber": 11110, + "EventRating": 0, + "hasDot": True, + "isFTARecording": False, + "LastPinChangedTime": 0, + "Version": 339, + "HasCicamPin": False, + "HasLicenseFile": False, + "Size": 0, + "ResumeInfo": 0, + "IsPartial": False, + "AutoMarginStart": 0, + "AutoMarginEnd": 0, + "ServerRecordingId": -1, + "ActualStartTime": 1676832212, + "ProgramDuration": 0, + "IsRadio": False, + "EITSource": "EIT_SOURCE_PF", + "RecError": "REC_ERROR_NONE", + }, + { + "RecordingId": 34, + "RecordingType": "RECORDING_PARTIALLY_VIEWED", + "IsIpEpgRec": False, + "ccid": 2091, + "StartTime": 1676677580, + "Duration": 484, + "MarginStart": 0, + "MarginEnd": 0, + "EventId": -1, + "EITVersion": 0, + "RetentionInfo": -1, + "EventInfo": "\n\nAlpine Ski-WM: Parallel-Event, Übertragung aus Méribel/Frankreich\n\n14:10: Biathlon-WM (AD): 20 km Einzel Männer, Übertragung aus Oberhof\nHD-Produktion", + "EventExtendedInfo": "", + "EventGenre": "4", + "RecName": "ZDF HD 2023-02-18 00:46", + "SeriesID": "None", + "SeasonNo": 0, + "EpisodeNo": 0, + "EpisodeCount": 2760, + "ProgramNumber": 11110, + "EventRating": 0, + "hasDot": True, + "isFTARecording": False, + "LastPinChangedTime": 0, + "Version": 328, + "HasCicamPin": False, + "HasLicenseFile": False, + "Size": 0, + "ResumeInfo": 56, + "IsPartial": False, + "AutoMarginStart": 0, + "AutoMarginEnd": 0, + "ServerRecordingId": -1, + "ActualStartTime": 1676677581, + "ProgramDuration": 0, + "IsRadio": False, + "EITSource": "EIT_SOURCE_PF", + "RecError": "REC_ERROR_NONE", + }, + ], +} diff --git a/tests/components/philips_js/test_binary_sensor.py b/tests/components/philips_js/test_binary_sensor.py new file mode 100644 index 00000000000..01233706d07 --- /dev/null +++ b/tests/components/philips_js/test_binary_sensor.py @@ -0,0 +1,83 @@ +"""The tests for philips_js binary_sensor.""" +import pytest + +from homeassistant.const import STATE_OFF, STATE_ON +from 
homeassistant.core import HomeAssistant + +from . import MOCK_NAME, MOCK_RECORDINGS_LIST + +ID_RECORDING_AVAILABLE = ( + "binary_sensor." + MOCK_NAME.replace(" ", "_").lower() + "_new_recording_available" +) +ID_RECORDING_ONGOING = ( + "binary_sensor." + MOCK_NAME.replace(" ", "_").lower() + "_recording_ongoing" +) + + +@pytest.fixture +async def mock_tv_api_invalid(mock_tv): + """Set up a invalid mock_tv with should not create sensors.""" + mock_tv.secured_transport = True + mock_tv.api_version = 1 + mock_tv.recordings_list = None + return mock_tv + + +@pytest.fixture +async def mock_tv_api_valid(mock_tv): + """Set up a valid mock_tv with should create sensors.""" + mock_tv.secured_transport = True + mock_tv.api_version = 6 + mock_tv.recordings_list = MOCK_RECORDINGS_LIST + return mock_tv + + +@pytest.fixture +async def mock_tv_recordings_list_unavailable(mock_tv): + """Set up a valid mock_tv with should create sensors.""" + mock_tv.secured_transport = True + mock_tv.api_version = 6 + mock_tv.recordings_list = None + return mock_tv + + +async def test_recordings_list_api_invalid( + mock_tv_api_invalid, mock_config_entry, hass: HomeAssistant +) -> None: + """Test if sensors are not created if mock_tv is invalid.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + state = hass.states.get(ID_RECORDING_AVAILABLE) + assert state is None + + state = hass.states.get(ID_RECORDING_ONGOING) + assert state is None + + +async def test_recordings_list_valid( + mock_tv_api_valid, mock_config_entry, hass: HomeAssistant +) -> None: + """Test if sensors are created correctly.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + state = hass.states.get(ID_RECORDING_AVAILABLE) + assert state.state == STATE_ON + + state = hass.states.get(ID_RECORDING_ONGOING) + assert state.state == STATE_ON + + +async def test_recordings_list_unavailable( + mock_tv_recordings_list_unavailable, mock_config_entry, hass: HomeAssistant +) -> 
None: + """Test if sensors are created correctly.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + state = hass.states.get(ID_RECORDING_AVAILABLE) + assert state.state == STATE_OFF + + state = hass.states.get(ID_RECORDING_ONGOING) + assert state.state == STATE_OFF diff --git a/tests/components/picnic/conftest.py b/tests/components/picnic/conftest.py new file mode 100644 index 00000000000..1ca6413fc42 --- /dev/null +++ b/tests/components/picnic/conftest.py @@ -0,0 +1,79 @@ +"""Conftest for Picnic tests.""" +from collections.abc import Awaitable, Callable +import json +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components.picnic import CONF_COUNTRY_CODE, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture +from tests.typing import WebSocketGenerator + +ENTITY_ID = "todo.mock_title_shopping_cart" + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ACCESS_TOKEN: "x-original-picnic-auth-token", + CONF_COUNTRY_CODE: "NL", + }, + unique_id="295-6y3-1nf4", + ) + + +@pytest.fixture +def mock_picnic_api(): + """Return a mocked PicnicAPI client.""" + with patch("homeassistant.components.picnic.PicnicAPI") as mock: + client = mock.return_value + client.session.auth_token = "3q29fpwhulzes" + client.get_cart.return_value = json.loads(load_fixture("picnic/cart.json")) + client.get_user.return_value = json.loads(load_fixture("picnic/user.json")) + client.get_deliveries.return_value = json.loads( + load_fixture("picnic/delivery.json") + ) + client.get_delivery_position.return_value = {} + yield client + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_picnic_api: MagicMock +) -> MockConfigEntry: + """Set up the Picnic 
integration for testing.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry + + +@pytest.fixture +async def get_items( + hass_ws_client: WebSocketGenerator +) -> Callable[[], Awaitable[dict[str, str]]]: + """Fixture to fetch items from the todo websocket.""" + + async def get() -> list[dict[str, str]]: + # Fetch items using To-do platform + client = await hass_ws_client() + await client.send_json_auto_id( + { + "id": id, + "type": "todo/item/list", + "entity_id": ENTITY_ID, + } + ) + resp = await client.receive_json() + assert resp.get("success") + return resp.get("result", {}).get("items", []) + + return get diff --git a/tests/components/picnic/fixtures/cart.json b/tests/components/picnic/fixtures/cart.json new file mode 100644 index 00000000000..bde170bb26a --- /dev/null +++ b/tests/components/picnic/fixtures/cart.json @@ -0,0 +1,337 @@ +{ + "items": [ + { + "type": "ORDER_LINE", + "id": "763", + "items": [ + { + "type": "ORDER_ARTICLE", + "id": "s1001194", + "name": "Knoflook", + "image_ids": [ + "4054013cb82da80abbdcd7c8eec54f486bfa180b9cf499e94cc4013470d0dfd7" + ], + "unit_quantity": "2 stuks", + "unit_quantity_sub": "€9.08/kg", + "price": 109, + "max_count": 50, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "2 stuks" + } + ] + } + ], + "display_price": 109, + "price": 109 + }, + { + "type": "ORDER_LINE", + "id": "765_766", + "items": [ + { + "type": "ORDER_ARTICLE", + "id": "s1046297", + "name": "Picnic magere melk", + "image_ids": [ + "c2a96757634ada380726d3307e564f244cfa86e89d94c2c0e382306dbad599a3" + ], + "unit_quantity": "2 x 1 liter", + "unit_quantity_sub": "€1.02/l", + "price": 204, + "max_count": 18, + "perishable": true, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 2 + }, + { + "type": 
"UNIT_QUANTITY", + "unit_quantity_text": "2 x 1 liter" + } + ] + } + ], + "display_price": 408, + "price": 408 + }, + { + "type": "ORDER_LINE", + "id": "767", + "items": [ + { + "type": "ORDER_ARTICLE", + "id": "s1010532", + "name": "Picnic magere melk", + "image_ids": [ + "aa8880361f045ffcfb9f787e9b7fc2b49907be46921bf42985506dc03baa6c2c" + ], + "unit_quantity": "1 liter", + "unit_quantity_sub": "€1.05/l", + "price": 105, + "max_count": 18, + "perishable": true, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "1 liter" + } + ] + } + ], + "display_price": 105, + "price": 105 + }, + { + "type": "ORDER_LINE", + "id": "774_775", + "items": [ + { + "type": "ORDER_ARTICLE", + "id": "s1018253", + "name": "Robijn wascapsules wit", + "image_ids": [ + "c78b809ccbcd65760f8ce897e083587ee7b3f2b9719affd80983fad722b5c2d9" + ], + "unit_quantity": "40 wasbeurten", + "price": 2899, + "max_count": 50, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "40 wasbeurten" + } + ] + }, + { + "type": "ORDER_ARTICLE", + "id": "s1007025", + "name": "Robijn wascapsules kleur", + "image_ids": [ + "ef9c8a371a639906ef20dfdcdc99296fce4102c47f0018e6329a2e4ae9f846b7" + ], + "unit_quantity": "15 wasbeurten", + "price": 879, + "max_count": 50, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "15 wasbeurten" + } + ] + } + ], + "display_price": 3778, + "price": 3778, + "decorators": [ + { + "type": "PROMO", + "text": "1+1 gratis" + }, + { + "type": "PRICE", + "display_price": 1889 + } + ] + }, + { + "type": "ORDER_LINE", + "id": "776_777_778_779_780", + "items": [ + { + "type": "ORDER_ARTICLE", + "id": "s1012699", + "name": "Chinese wokgroenten", + 
"image_ids": [ + "b0b547a03d1d6021565618a5d32bd35df34c57b348d73252defb776ab8f8ab76" + ], + "unit_quantity": "600 gram", + "unit_quantity_sub": "€4.92/kg", + "price": 295, + "max_count": 50, + "perishable": true, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "600 gram" + } + ] + }, + { + "type": "ORDER_ARTICLE", + "id": "s1003425", + "name": "Picnic boerderij-eitjes", + "image_ids": [ + "8be72b8144bfb7ff637d4703cfcb11e1bee789de79c069d00e879650dbf19840" + ], + "unit_quantity": "6 stuks M/L", + "price": 305, + "max_count": 50, + "perishable": true, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "6 stuks M/L" + } + ] + }, + { + "type": "ORDER_ARTICLE", + "id": "s1016692", + "name": "Picnic witte snelkookrijst", + "image_ids": [ + "9c76c0a0143bfef650ab85fff4f0918e0b4e2927d79caa2a2bf394f292a86213" + ], + "unit_quantity": "400 gram", + "unit_quantity_sub": "€3.23/kg", + "price": 129, + "max_count": 99, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "400 gram" + } + ] + }, + { + "type": "ORDER_ARTICLE", + "id": "s1012503", + "name": "Conimex kruidenmix nasi", + "image_ids": [ + "2eb78de465aa327a9739d9b204affce17fdf6bf7675c4fe9fa2d4ec102791c69" + ], + "unit_quantity": "20 gram", + "unit_quantity_sub": "€42.50/kg", + "price": 85, + "max_count": 50, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "20 gram" + } + ] + }, + { + "type": "ORDER_ARTICLE", + "id": "s1005028", + "name": "Conimex satésaus mild kant & klaar", + "image_ids": [ + 
"0273de24577ba25526cdf31c53ef2017c62611b2bb4d82475abb2dcd9b2f5b83" + ], + "unit_quantity": "400 gram", + "unit_quantity_sub": "€5.98/kg", + "price": 239, + "max_count": 50, + "perishable": false, + "tags": [], + "decorators": [ + { + "type": "QUANTITY", + "quantity": 1 + }, + { + "type": "IMMUTABLE" + }, + { + "type": "UNIT_QUANTITY", + "unit_quantity_text": "400 gram" + } + ] + } + ], + "display_price": 1053, + "price": 1053, + "decorators": [ + { + "type": "PROMO", + "text": "Receptkorting" + }, + { + "type": "PRICE", + "display_price": 880 + } + ] + } + ], + "delivery_slots": [ + { + "slot_id": "611a3b074872b23576bef456a", + "window_start": "2021-03-03T14:45:00.000+01:00", + "window_end": "2021-03-03T15:45:00.000+01:00", + "cut_off_time": "2021-03-02T22:00:00.000+01:00", + "minimum_order_value": 3500 + } + ], + "selected_slot": { + "slot_id": "611a3b074872b23576bef456a", + "state": "EXPLICIT" + }, + "total_count": 10, + "total_price": 2535 +} diff --git a/tests/components/picnic/fixtures/delivery.json b/tests/components/picnic/fixtures/delivery.json new file mode 100644 index 00000000000..61a7fe7ac35 --- /dev/null +++ b/tests/components/picnic/fixtures/delivery.json @@ -0,0 +1,31 @@ +{ + "delivery_id": "z28fjso23e", + "creation_time": "2021-02-24T21:48:46.395+01:00", + "slot": { + "slot_id": "602473859a40dc24c6b65879", + "hub_id": "AMS", + "window_start": "2021-02-26T20:15:00.000+01:00", + "window_end": "2021-02-26T21:15:00.000+01:00", + "cut_off_time": "2021-02-25T22:00:00.000+01:00", + "minimum_order_value": 3500 + }, + "eta2": { + "start": "2021-02-26T20:54:00.000+01:00", + "end": "2021-02-26T21:14:00.000+01:00" + }, + "status": "COMPLETED", + "delivery_time": { + "start": "2021-02-26T20:54:05.221+01:00", + "end": "2021-02-26T20:58:31.802+01:00" + }, + "orders": [ + { + "creation_time": "2021-02-24T21:48:46.418+01:00", + "total_price": 3597 + }, + { + "creation_time": "2021-02-25T17:10:26.816+01:00", + "total_price": 536 + } + ] +} diff --git 
a/tests/components/picnic/fixtures/user.json b/tests/components/picnic/fixtures/user.json new file mode 100644 index 00000000000..3656d11e98c --- /dev/null +++ b/tests/components/picnic/fixtures/user.json @@ -0,0 +1,14 @@ +{ + "user_id": "295-6y3-1nf4", + "firstname": "User", + "lastname": "Name", + "address": { + "house_number": 123, + "house_number_ext": "a", + "postcode": "4321 AB", + "street": "Commonstreet", + "city": "Somewhere" + }, + "total_deliveries": 123, + "completed_deliveries": 112 +} diff --git a/tests/components/picnic/snapshots/test_todo.ambr b/tests/components/picnic/snapshots/test_todo.ambr new file mode 100644 index 00000000000..4b92584c0fc --- /dev/null +++ b/tests/components/picnic/snapshots/test_todo.ambr @@ -0,0 +1,55 @@ +# serializer version: 1 +# name: test_cart_list_with_items + list([ + dict({ + 'status': 'needs_action', + 'summary': 'Knoflook (2 stuks)', + 'uid': '763-s1001194', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Picnic magere melk (2 x 1 liter)', + 'uid': '765_766-s1046297', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Picnic magere melk (1 liter)', + 'uid': '767-s1010532', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Robijn wascapsules wit (40 wasbeurten)', + 'uid': '774_775-s1018253', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Robijn wascapsules kleur (15 wasbeurten)', + 'uid': '774_775-s1007025', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Chinese wokgroenten (600 gram)', + 'uid': '776_777_778_779_780-s1012699', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Picnic boerderij-eitjes (6 stuks M/L)', + 'uid': '776_777_778_779_780-s1003425', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Picnic witte snelkookrijst (400 gram)', + 'uid': '776_777_778_779_780-s1016692', + }), + dict({ + 'status': 'needs_action', + 'summary': 'Conimex kruidenmix nasi (20 gram)', + 'uid': '776_777_778_779_780-s1012503', + }), + dict({ + 'status': 'needs_action', + 
'summary': 'Conimex satésaus mild kant & klaar (400 gram)', + 'uid': '776_777_778_779_780-s1005028', + }), + ]) +# --- diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py new file mode 100644 index 00000000000..cdd30967058 --- /dev/null +++ b/tests/components/picnic/test_todo.py @@ -0,0 +1,126 @@ +"""Tests for Picnic Tasks todo platform.""" + +from unittest.mock import MagicMock, Mock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import ENTITY_ID + +from tests.common import MockConfigEntry + + +async def test_cart_list_with_items( + hass: HomeAssistant, + init_integration, + get_items, + snapshot: SnapshotAssertion, +) -> None: + """Test loading of shopping cart.""" + state = hass.states.get(ENTITY_ID) + assert state + assert state.state == "10" + + assert snapshot == await get_items() + + +async def test_cart_list_empty_items( + hass: HomeAssistant, mock_picnic_api: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Test loading of shopping cart without items.""" + mock_picnic_api.get_cart.return_value = {"items": []} + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state + assert state.state == "0" + + +async def test_cart_list_unexpected_response( + hass: HomeAssistant, mock_picnic_api: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Test loading of shopping cart without expected response.""" + mock_picnic_api.get_cart.return_value = {} + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state is None + + +async def 
test_cart_list_null_response( + hass: HomeAssistant, mock_picnic_api: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Test loading of shopping cart without response.""" + mock_picnic_api.get_cart.return_value = None + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state is None + + +async def test_create_todo_list_item( + hass: HomeAssistant, init_integration: MockConfigEntry, mock_picnic_api: MagicMock +) -> None: + """Test for creating a picnic cart item.""" + assert len(mock_picnic_api.get_cart.mock_calls) == 1 + + mock_picnic_api.search = Mock() + mock_picnic_api.search.return_value = [ + { + "items": [ + { + "id": 321, + "name": "Picnic Melk", + "unit_quantity": "2 liter", + } + ] + } + ] + + mock_picnic_api.add_product = Mock() + + await hass.services.async_call( + DOMAIN, + "add_item", + {"item": "Melk"}, + target={"entity_id": ENTITY_ID}, + blocking=True, + ) + + args = mock_picnic_api.search.call_args + assert args + assert args[0][0] == "Melk" + + args = mock_picnic_api.add_product.call_args + assert args + assert args[0][0] == "321" + assert args[0][1] == 1 + + assert len(mock_picnic_api.get_cart.mock_calls) == 2 + + +async def test_create_todo_list_item_not_found( + hass: HomeAssistant, init_integration: MockConfigEntry, mock_picnic_api: MagicMock +) -> None: + """Test for creating a picnic cart item when ID is not found.""" + mock_picnic_api.search = Mock() + mock_picnic_api.search.return_value = [{"items": []}] + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + "add_item", + {"item": "Melk"}, + target={"entity_id": ENTITY_ID}, + blocking=True, + ) diff --git a/tests/components/ping/conftest.py b/tests/components/ping/conftest.py new file mode 100644 index 00000000000..4ad06a09c1c --- /dev/null +++ b/tests/components/ping/conftest.py @@ -0,0 +1,54 @@ 
+"""Test configuration for ping.""" +from unittest.mock import patch + +from icmplib import Host +import pytest + +from homeassistant.components.ping import DOMAIN +from homeassistant.components.ping.const import CONF_PING_COUNT +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def patch_setup(*args, **kwargs): + """Patch setup methods.""" + with patch( + "homeassistant.components.ping.async_setup_entry", + return_value=True, + ), patch("homeassistant.components.ping.async_setup", return_value=True): + yield + + +@pytest.fixture(autouse=True) +async def patch_ping(): + """Patch icmplib async_ping.""" + mock = Host("10.10.10.10", 5, [10, 1, 2]) + + with patch( + "homeassistant.components.ping.helpers.async_ping", return_value=mock + ), patch("homeassistant.components.ping.async_ping", return_value=mock): + yield mock + + +@pytest.fixture(name="config_entry") +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a MockConfigEntry for testing.""" + return MockConfigEntry( + domain=DOMAIN, + title="10.10.10.10", + options={CONF_HOST: "10.10.10.10", CONF_PING_COUNT: 10.0}, + ) + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, patch_ping +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ping/const.py b/tests/components/ping/const.py new file mode 100644 index 00000000000..cf002dc7ca6 --- /dev/null +++ b/tests/components/ping/const.py @@ -0,0 +1,11 @@ +"""Constants for tests.""" +from icmplib import Host + +BINARY_SENSOR_IMPORT_DATA = { + "name": "test2", + "host": "127.0.0.1", + "count": 1, + "scan_interval": 50, +} + +NON_AVAILABLE_HOST_PING = Host("192.168.178.1", 10, []) diff 
--git a/tests/components/ping/fixtures/configuration.yaml b/tests/components/ping/fixtures/configuration.yaml deleted file mode 100644 index 201c020835e..00000000000 --- a/tests/components/ping/fixtures/configuration.yaml +++ /dev/null @@ -1,5 +0,0 @@ -binary_sensor: - - platform: ping - name: test2 - host: 127.0.0.1 - count: 1 diff --git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..2ce320d561b --- /dev/null +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -0,0 +1,121 @@ +# serializer version: 1 +# name: test_sensor + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.10_10_10_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': '10.10.10.10', + 'platform': 'ping', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + 'round_trip_time_avg': 4.333, + 'round_trip_time_max': 10, + 'round_trip_time_mdev': '', + 'round_trip_time_min': 1, + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_and_update + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.10_10_10_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': '10.10.10.10', + 'platform': 'ping', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_and_update.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + 'round_trip_time_avg': 4.333, + 'round_trip_time_max': 10, + 'round_trip_time_mdev': '', + 'round_trip_time_min': 1, + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_and_update.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': '10.10.10.10', + }), + 'context': , + 'entity_id': 'binary_sensor.10_10_10_10', + 'last_changed': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/ping/test_binary_sensor.py b/tests/components/ping/test_binary_sensor.py index 3389534483f..b1066895e2b 100644 --- a/tests/components/ping/test_binary_sensor.py +++ b/tests/components/ping/test_binary_sensor.py @@ -1,27 +1,75 @@ -"""The test for the ping binary_sensor platform.""" +"""Test the binary sensor platform of ping.""" +from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory +from icmplib import Host import pytest +from syrupy import SnapshotAssertion +from syrupy.filters import props -from homeassistant import config as hass_config, setup -from 
homeassistant.components.ping import DOMAIN -from homeassistant.const import SERVICE_RELOAD -from homeassistant.core import HomeAssistant +from homeassistant.components.ping.const import CONF_IMPORTED_BY, DOMAIN +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component -from tests.common import get_fixture_path +from tests.common import MockConfigEntry -@pytest.fixture -def mock_ping() -> None: - """Mock icmplib.ping.""" - with patch("homeassistant.components.ping.icmp_ping"): - yield +@pytest.mark.usefixtures("setup_integration") +async def test_setup_and_update( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test sensor setup and update.""" + + # check if binary sensor is there + entry = entity_registry.async_get("binary_sensor.10_10_10_10") + assert entry == snapshot(exclude=props("unique_id")) + + state = hass.states.get("binary_sensor.10_10_10_10") + assert state == snapshot + + # check if the sensor turns off. 
+ with patch( + "homeassistant.components.ping.helpers.async_ping", + return_value=Host(address="10.10.10.10", packets_sent=10, rtts=[]), + ): + freezer.tick(timedelta(minutes=6)) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.10_10_10_10") + assert state == snapshot -async def test_reload(hass: HomeAssistant, mock_ping: None) -> None: - """Verify we can reload trend sensors.""" +async def test_disabled_after_import( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +): + """Test if binary sensor is disabled after import.""" + config_entry.data = {CONF_IMPORTED_BY: "device_tracker"} + config_entry.add_to_hass(hass) - await setup.async_setup_component( + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + # check if entity is disabled after import by device tracker + entry = entity_registry.async_get("binary_sensor.10_10_10_10") + assert entry + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +async def test_import_issue_creation( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +): + """Test if import issue is raised.""" + + await async_setup_component( hass, "binary_sensor", { @@ -35,21 +83,7 @@ async def test_reload(hass: HomeAssistant, mock_ping: None) -> None: ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 1 - - assert hass.states.get("binary_sensor.test") - - yaml_path = get_fixture_path("configuration.yaml", "ping") - with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): - await hass.services.async_call( - DOMAIN, - SERVICE_RELOAD, - {}, - blocking=True, - ) - await hass.async_block_till_done() - - assert len(hass.states.async_all()) == 1 - - assert hass.states.get("binary_sensor.test") is None - assert hass.states.get("binary_sensor.test2") + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + assert 
issue diff --git a/tests/components/ping/test_config_flow.py b/tests/components/ping/test_config_flow.py new file mode 100644 index 00000000000..6fff4ae7c71 --- /dev/null +++ b/tests/components/ping/test_config_flow.py @@ -0,0 +1,122 @@ +"""Test the Ping (ICMP) config flow.""" +from __future__ import annotations + +import pytest + +from homeassistant import config_entries +from homeassistant.components.ping import DOMAIN +from homeassistant.components.ping.const import CONF_IMPORTED_BY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import BINARY_SENSOR_IMPORT_DATA + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("host", "expected_title"), + (("192.618.178.1", "192.618.178.1"),), +) +@pytest.mark.usefixtures("patch_setup") +async def test_form(hass: HomeAssistant, host, expected_title) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] == FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": host, + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == expected_title + assert result["data"] == {} + assert result["options"] == { + "count": 5, + "host": host, + } + + +@pytest.mark.parametrize( + ("host", "count", "expected_title"), + (("192.618.178.1", 10, "192.618.178.1"),), +) +@pytest.mark.usefixtures("patch_setup") +async def test_options(hass: HomeAssistant, host, count, expected_title) -> None: + """Test options flow.""" + + config_entry = MockConfigEntry( + version=1, + source=config_entries.SOURCE_USER, + data={}, + domain=DOMAIN, + options={"count": count, "host": host}, + title=expected_title, + ) + config_entry.add_to_hass(hass) + + assert await 
hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + { + "host": "10.10.10.1", + "count": count, + }, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "count": count, + "host": "10.10.10.1", + } + + +@pytest.mark.usefixtures("patch_setup") +async def test_step_import(hass: HomeAssistant) -> None: + """Test for import step.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_IMPORTED_BY: "binary_sensor", **BINARY_SENSOR_IMPORT_DATA}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "test2" + assert result["data"] == {CONF_IMPORTED_BY: "binary_sensor"} + assert result["options"] == { + "host": "127.0.0.1", + "count": 1, + } + + # test import without name + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_IMPORTED_BY: "binary_sensor", "host": "10.10.10.10", "count": 5}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "10.10.10.10" + assert result["data"] == {CONF_IMPORTED_BY: "binary_sensor"} + assert result["options"] == { + "host": "10.10.10.10", + "count": 5, + } diff --git a/tests/components/ping/test_device_tracker.py b/tests/components/ping/test_device_tracker.py new file mode 100644 index 00000000000..b6cc6b42912 --- /dev/null +++ b/tests/components/ping/test_device_tracker.py @@ -0,0 +1,62 @@ +"""Test the binary sensor platform of ping.""" + +import pytest + +from 
homeassistant.components.ping.const import DOMAIN +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_integration") +async def test_setup_and_update( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, +) -> None: + """Test sensor setup and update.""" + + entry = entity_registry.async_get("device_tracker.10_10_10_10") + assert entry + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + # check device tracker state is not there + state = hass.states.get("device_tracker.10_10_10_10") + assert state is None + + # enable the entity + updated_entry = entity_registry.async_update_entity( + entity_id="device_tracker.10_10_10_10", disabled_by=None + ) + assert updated_entry != entry + assert updated_entry.disabled is False + + # reload config entry to enable entity + await hass.config_entries.async_reload(config_entry.entry_id) + await hass.async_block_till_done() + + # check device tracker is now "home" + state = hass.states.get("device_tracker.10_10_10_10") + assert state.state == "home" + + +async def test_import_issue_creation( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +): + """Test if import issue is raised.""" + + await async_setup_component( + hass, + "device_tracker", + {"device_tracker": {"platform": "ping", "hosts": {"test": "10.10.10.10"}}}, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + assert issue diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 235596715f4..47d70727890 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ 
-851,7 +851,7 @@ async def test_client_header_issues( ), patch( "homeassistant.components.http.current_request.get", return_value=MockRequest() ), pytest.raises( - RuntimeError + RuntimeError, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} diff --git a/tests/components/plugwise/fixtures/adam_jip/all_data.json b/tests/components/plugwise/fixtures/adam_jip/all_data.json index bc1bc9c8c0c..dacee20c644 100644 --- a/tests/components/plugwise/fixtures/adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/adam_jip/all_data.json @@ -8,7 +8,7 @@ "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "06aecb3d00354375924f50c47af36bd2", - "mode": "heat", + "mode": "off", "model": "Lisa", "name": "Slaapkamer", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index e7e13e17357..9ef93d63bdd 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -21,6 +21,7 @@ "binary_sensors": { "compressor_state": true, "cooling_enabled": false, + "cooling_state": false, "dhw_state": false, "flame_state": false, "heating_state": true, @@ -40,7 +41,7 @@ "setpoint": 60.0, "upper_bound": 100.0 }, - "model": "Generic heater", + "model": "Generic heater/cooler", "name": "OpenTherm", "sensors": { "dhw_temperature": 46.3, @@ -72,7 +73,8 @@ "cooling_activation_outdoor_temperature": 21.0, "cooling_deactivation_threshold": 4.0, "illuminance": 86.0, - "setpoint": 20.5, + "setpoint_high": 30.0, + "setpoint_low": 20.5, "temperature": 19.3 }, "temperature_offset": { @@ -84,16 +86,18 @@ "thermostat": { "lower_bound": 4.0, "resolution": 0.1, - "setpoint": 20.5, + "setpoint_high": 30.0, + "setpoint_low": 20.5, "upper_bound": 30.0 }, "vendor": "Plugwise" } }, 
"gateway": { - "cooling_present": false, + "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", + "item_count": 66, "notifications": {}, "smile_name": "Smile Anna" } diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 126852e945d..624547155a3 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -55,22 +55,20 @@ "available_schedules": ["Weekschema", "Badkamer", "Test"], "dev_class": "thermostat", "location": "f2bf9048bef64cc5b6d5110154e33c81", - "mode": "heat_cool", + "mode": "cool", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "select_schedule": "Weekschema", "selected_schedule": "None", "sensors": { - "setpoint_high": 23.5, - "setpoint_low": 4.0, + "setpoint": 23.5, "temperature": 25.8 }, "thermostat": { "lower_bound": 1.0, "resolution": 0.01, - "setpoint_high": 23.5, - "setpoint_low": 4.0, + "setpoint": 23.5, "upper_bound": 35.0 }, "vendor": "Plugwise" @@ -115,9 +113,8 @@ "select_schedule": "Badkamer", "sensors": { "battery": 56, - "setpoint_high": 23.5, - "setpoint_low": 20.0, - "temperature": 239 + "setpoint": 23.5, + "temperature": 23.9 }, "temperature_offset": { "lower_bound": -2.0, @@ -128,8 +125,7 @@ "thermostat": { "lower_bound": 0.0, "resolution": 0.01, - "setpoint_high": 25.0, - "setpoint_low": 19.0, + "setpoint": 25.0, "upper_bound": 99.9 }, "vendor": "Plugwise", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 40364e620c3..844eae4c2f7 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -73,7 +73,7 @@ 
"cooling_activation_outdoor_temperature": 21.0, "cooling_deactivation_threshold": 4.0, "illuminance": 86.0, - "setpoint_high": 24.0, + "setpoint_high": 30.0, "setpoint_low": 20.5, "temperature": 26.3 }, @@ -86,7 +86,7 @@ "thermostat": { "lower_bound": 4.0, "resolution": 0.1, - "setpoint_high": 24.0, + "setpoint_high": 30.0, "setpoint_low": 20.5, "upper_bound": 30.0 }, @@ -97,6 +97,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", + "item_count": 66, "notifications": {}, "smile_name": "Smile Anna" } diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 3a84a59deea..f6be6f35188 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -73,7 +73,7 @@ "cooling_activation_outdoor_temperature": 25.0, "cooling_deactivation_threshold": 4.0, "illuminance": 86.0, - "setpoint_high": 24.0, + "setpoint_high": 30.0, "setpoint_low": 20.5, "temperature": 23.0 }, @@ -86,7 +86,7 @@ "thermostat": { "lower_bound": 4.0, "resolution": 0.1, - "setpoint_high": 24.0, + "setpoint_high": 30.0, "setpoint_low": 20.5, "upper_bound": 30.0 }, @@ -97,6 +97,7 @@ "cooling_present": true, "gateway_id": "015ae9ea3f964e668e490fa39da3870b", "heater_id": "1cbf783bb11e4a7c8a6843dee3a86927", + "item_count": 66, "notifications": {}, "smile_name": "Smile Anna" } diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index d8ce2785f2a..c14fd802e3b 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -13,6 +13,10 @@ from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed +HA_PLUGWISE_SMILE_ASYNC_UPDATE = ( + "homeassistant.components.plugwise.coordinator.Smile.async_update" +) + 
async def test_adam_climate_entity_attributes( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry @@ -21,7 +25,7 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.zone_lisa_wk") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_modes"] == [HVACMode.HEAT, HVACMode.AUTO] + assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] # hvac_action is not asserted as the fixture is not in line with recent firmware functionality assert "preset_modes" in state.attributes @@ -33,13 +37,13 @@ async def test_adam_climate_entity_attributes( assert state.attributes["supported_features"] == 17 assert state.attributes["temperature"] == 21.5 assert state.attributes["min_temp"] == 0.0 - assert state.attributes["max_temp"] == 99.9 + assert state.attributes["max_temp"] == 35.0 assert state.attributes["target_temp_step"] == 0.1 state = hass.states.get("climate.zone_thermostat_jessie") assert state assert state.state == HVACMode.AUTO - assert state.attributes["hvac_modes"] == [HVACMode.HEAT, HVACMode.AUTO] + assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] # hvac_action is not asserted as the fixture is not in line with recent firmware functionality assert "preset_modes" in state.attributes @@ -50,7 +54,7 @@ async def test_adam_climate_entity_attributes( assert state.attributes["preset_mode"] == "asleep" assert state.attributes["temperature"] == 15.0 assert state.attributes["min_temp"] == 0.0 - assert state.attributes["max_temp"] == 99.9 + assert state.attributes["max_temp"] == 35.0 assert state.attributes["target_temp_step"] == 0.1 @@ -62,13 +66,21 @@ async def test_adam_2_climate_entity_attributes( assert state assert state.state == HVACMode.HEAT assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [HVACMode.HEAT, HVACMode.AUTO] + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + 
HVACMode.HEAT, + ] state = hass.states.get("climate.lisa_badkamer") assert state assert state.state == HVACMode.AUTO assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [HVACMode.HEAT, HVACMode.AUTO] + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.HEAT, + ] async def test_adam_3_climate_entity_attributes( @@ -78,11 +90,58 @@ async def test_adam_3_climate_entity_attributes( state = hass.states.get("climate.anna") assert state - assert state.state == HVACMode.HEAT_COOL + assert state.state == HVACMode.COOL assert state.attributes["hvac_action"] == "cooling" assert state.attributes["hvac_modes"] == [ - HVACMode.HEAT_COOL, + HVACMode.OFF, HVACMode.AUTO, + HVACMode.COOL, + ] + data = mock_smile_adam_3.async_update.return_value + data.devices["da224107914542988a88561b4452b0f6"][ + "select_regulation_mode" + ] = "heating" + data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["mode"] = "heat" + data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ + "cooling_state" + ] = False + data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ + "heating_state" + ] = True + with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): + async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) + await hass.async_block_till_done() + state = hass.states.get("climate.anna") + assert state + assert state.state == HVACMode.HEAT + assert state.attributes["hvac_action"] == "heating" + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.HEAT, + ] + data = mock_smile_adam_3.async_update.return_value + data.devices["da224107914542988a88561b4452b0f6"][ + "select_regulation_mode" + ] = "cooling" + data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["mode"] = "cool" + data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ + "cooling_state" + ] = True + data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ + "heating_state" + ] 
= False + with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): + async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) + await hass.async_block_till_done() + state = hass.states.get("climate.anna") + assert state + assert state.state == HVACMode.COOL + assert state.attributes["hvac_action"] == "cooling" + assert state.attributes["hvac_modes"] == [ + HVACMode.OFF, + HVACMode.AUTO, + HVACMode.COOL, ] @@ -173,6 +232,60 @@ async def test_adam_climate_entity_climate_changes( ) +async def test_adam_climate_off_mode_change( + hass: HomeAssistant, + mock_smile_adam_4: MagicMock, + init_integration: MockConfigEntry, +) -> None: + """Test handling of user requests in adam climate device environment.""" + state = hass.states.get("climate.slaapkamer") + assert state + assert state.state == HVACMode.OFF + await hass.services.async_call( + "climate", + "set_hvac_mode", + { + "entity_id": "climate.slaapkamer", + "hvac_mode": "heat", + }, + blocking=True, + ) + assert mock_smile_adam_4.set_schedule_state.call_count == 1 + assert mock_smile_adam_4.set_regulation_mode.call_count == 1 + mock_smile_adam_4.set_regulation_mode.assert_called_with("heating") + + state = hass.states.get("climate.kinderkamer") + assert state + assert state.state == HVACMode.HEAT + await hass.services.async_call( + "climate", + "set_hvac_mode", + { + "entity_id": "climate.kinderkamer", + "hvac_mode": "off", + }, + blocking=True, + ) + assert mock_smile_adam_4.set_schedule_state.call_count == 1 + assert mock_smile_adam_4.set_regulation_mode.call_count == 2 + mock_smile_adam_4.set_regulation_mode.assert_called_with("off") + + state = hass.states.get("climate.logeerkamer") + assert state + assert state.state == HVACMode.HEAT + await hass.services.async_call( + "climate", + "set_hvac_mode", + { + "entity_id": "climate.logeerkamer", + "hvac_mode": "heat", + }, + blocking=True, + ) + assert mock_smile_adam_4.set_schedule_state.call_count == 1 + assert 
mock_smile_adam_4.set_regulation_mode.call_count == 2 + + async def test_anna_climate_entity_attributes( hass: HomeAssistant, mock_smile_anna: MagicMock, @@ -183,20 +296,18 @@ async def test_anna_climate_entity_attributes( assert state assert state.state == HVACMode.AUTO assert state.attributes["hvac_action"] == "heating" - assert state.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.AUTO, - ] + assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT_COOL] assert "no_frost" in state.attributes["preset_modes"] assert "home" in state.attributes["preset_modes"] assert state.attributes["current_temperature"] == 19.3 assert state.attributes["preset_mode"] == "home" - assert state.attributes["supported_features"] == 17 - assert state.attributes["temperature"] == 20.5 - assert state.attributes["min_temp"] == 4.0 - assert state.attributes["max_temp"] == 30.0 + assert state.attributes["supported_features"] == 18 + assert state.attributes["target_temp_high"] == 30 + assert state.attributes["target_temp_low"] == 20.5 + assert state.attributes["min_temp"] == 4 + assert state.attributes["max_temp"] == 30 assert state.attributes["target_temp_step"] == 0.1 @@ -211,11 +322,11 @@ async def test_anna_2_climate_entity_attributes( assert state.state == HVACMode.AUTO assert state.attributes["hvac_action"] == "cooling" assert state.attributes["hvac_modes"] == [ - HVACMode.HEAT_COOL, HVACMode.AUTO, + HVACMode.HEAT_COOL, ] assert state.attributes["supported_features"] == 18 - assert state.attributes["target_temp_high"] == 24.0 + assert state.attributes["target_temp_high"] == 30 assert state.attributes["target_temp_low"] == 20.5 @@ -230,8 +341,8 @@ async def test_anna_3_climate_entity_attributes( assert state.state == HVACMode.AUTO assert state.attributes["hvac_action"] == "idle" assert state.attributes["hvac_modes"] == [ - HVACMode.HEAT_COOL, HVACMode.AUTO, + HVACMode.HEAT_COOL, ] @@ -244,13 +355,13 @@ async def test_anna_climate_entity_climate_changes( await 
hass.services.async_call( "climate", "set_temperature", - {"entity_id": "climate.anna", "target_temp_high": 25, "target_temp_low": 20}, + {"entity_id": "climate.anna", "target_temp_high": 30, "target_temp_low": 20}, blocking=True, ) assert mock_smile_anna.set_temperature.call_count == 1 mock_smile_anna.set_temperature.assert_called_with( "c784ee9fdab44e1395b8dee7d7a497d5", - {"setpoint_high": 25.0, "setpoint_low": 20.0}, + {"setpoint_high": 30.0, "setpoint_low": 20.0}, ) await hass.services.async_call( @@ -270,29 +381,24 @@ async def test_anna_climate_entity_climate_changes( {"entity_id": "climate.anna", "hvac_mode": "auto"}, blocking=True, ) - assert mock_smile_anna.set_schedule_state.call_count == 1 - mock_smile_anna.set_schedule_state.assert_called_with( - "c784ee9fdab44e1395b8dee7d7a497d5", "on" - ) + # hvac_mode is already auto so not called. + assert mock_smile_anna.set_schedule_state.call_count == 0 await hass.services.async_call( "climate", "set_hvac_mode", - {"entity_id": "climate.anna", "hvac_mode": "heat"}, + {"entity_id": "climate.anna", "hvac_mode": "heat_cool"}, blocking=True, ) - assert mock_smile_anna.set_schedule_state.call_count == 2 + assert mock_smile_anna.set_schedule_state.call_count == 1 mock_smile_anna.set_schedule_state.assert_called_with( "c784ee9fdab44e1395b8dee7d7a497d5", "off" ) data = mock_smile_anna.async_update.return_value data.devices["3cb70739631c4d17a86b8b12e8a5161b"]["available_schedules"] = ["None"] - with patch( - "homeassistant.components.plugwise.coordinator.Smile.async_update", - return_value=data, - ): + with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): async_fire_time_changed(hass, utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() state = hass.states.get("climate.anna") assert state.state == HVACMode.HEAT - assert state.attributes["hvac_modes"] == [HVACMode.HEAT] + assert state.attributes["hvac_modes"] == [HVACMode.HEAT_COOL] diff --git a/tests/components/plugwise/test_select.py 
b/tests/components/plugwise/test_select.py index 9df20a5ffc8..f1220a07a2b 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -16,7 +16,7 @@ from tests.common import MockConfigEntry async def test_adam_select_entities( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: - """Test a select.""" + """Test a thermostat Select.""" state = hass.states.get("select.zone_lisa_wk_thermostat_schedule") assert state @@ -44,3 +44,27 @@ async def test_adam_change_select_entity( "on", "Badkamer Schema", ) + + +async def test_adam_select_regulation_mode( + hass: HomeAssistant, mock_smile_adam_3: MagicMock, init_integration: MockConfigEntry +) -> None: + """Test a regulation_mode select. + + Also tests a change in climate _previous mode. + """ + + state = hass.states.get("select.adam_regulation_mode") + assert state + assert state.state == "cooling" + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + "entity_id": "select.adam_regulation_mode", + "option": "heating", + }, + blocking=True, + ) + assert mock_smile_adam_3.set_regulation_mode.call_count == 1 + mock_smile_adam_3.set_regulation_mode.assert_called_with("heating") diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index f24782b98d4..af2f2ba5784 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -20,6 +20,7 @@ from homeassistant.components import ( input_number, light, lock, + number, person, prometheus, sensor, @@ -292,6 +293,30 @@ async def test_input_number(client, input_number_entities) -> None: ) +@pytest.mark.parametrize("namespace", [""]) +async def test_number(client, number_entities) -> None: + """Test prometheus metrics for number.""" + body = await generate_latest_metrics(client) + + assert ( + 'number_state{domain="number",' + 'entity="number.threshold",' + 'friendly_name="Threshold"} 5.2' in 
body + ) + + assert ( + 'number_state{domain="number",' + 'entity="number.brightness",' + 'friendly_name="None"} 60.0' in body + ) + + assert ( + 'number_state_celsius{domain="number",' + 'entity="number.target_temperature",' + 'friendly_name="Target temperature"} 22.7' in body + ) + + @pytest.mark.parametrize("namespace", [""]) async def test_battery(client, sensor_entities) -> None: """Test prometheus metrics for battery.""" @@ -466,6 +491,12 @@ async def test_light(client, light_entities) -> None: 'friendly_name="PC"} 70.58823529411765' in body ) + assert ( + 'light_brightness_percent{domain="light",' + 'entity="light.hallway",' + 'friendly_name="Hallway"} 100.0' in body + ) + @pytest.mark.parametrize("namespace", [""]) async def test_lock(client, lock_entities) -> None: @@ -1382,6 +1413,46 @@ async def input_number_fixture( return data +@pytest.fixture(name="number_entities") +async def number_fixture( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> dict[str, er.RegistryEntry]: + """Simulate number entities.""" + data = {} + number_1 = entity_registry.async_get_or_create( + domain=number.DOMAIN, + platform="test", + unique_id="number_1", + suggested_object_id="threshold", + original_name="Threshold", + ) + set_state_with_entry(hass, number_1, 5.2) + data["number_1"] = number_1 + + number_2 = entity_registry.async_get_or_create( + domain=number.DOMAIN, + platform="test", + unique_id="number_2", + suggested_object_id="brightness", + ) + set_state_with_entry(hass, number_2, 60) + data["number_2"] = number_2 + + number_3 = entity_registry.async_get_or_create( + domain=number.DOMAIN, + platform="test", + unique_id="number_3", + suggested_object_id="target_temperature", + original_name="Target temperature", + unit_of_measurement=UnitOfTemperature.CELSIUS, + ) + set_state_with_entry(hass, number_3, 22.7) + data["number_3"] = number_3 + + await hass.async_block_till_done() + return data + + @pytest.fixture(name="input_boolean_entities") async def 
input_boolean_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry @@ -1492,6 +1563,19 @@ async def light_fixture( data["light_4"] = light_4 data["light_4_attributes"] = light_4_attributes + light_5 = entity_registry.async_get_or_create( + domain=light.DOMAIN, + platform="test", + unique_id="light_5", + suggested_object_id="hallway", + original_name="Hallway", + ) + # Light is on, but brightness is unset; expect metrics to report + # brightness of 100%. + light_5_attributes = {light.ATTR_BRIGHTNESS: None} + set_state_with_entry(hass, light_5, STATE_ON, light_5_attributes) + data["light_5"] = light_5 + data["light_5_attributes"] = light_5_attributes await hass.async_block_till_done() return data diff --git a/tests/components/pvpc_hourly_pricing/conftest.py b/tests/components/pvpc_hourly_pricing/conftest.py index fb2c9188ce7..efe15547c13 100644 --- a/tests/components/pvpc_hourly_pricing/conftest.py +++ b/tests/components/pvpc_hourly_pricing/conftest.py @@ -10,6 +10,7 @@ from tests.common import load_fixture from tests.test_util.aiohttp import AiohttpClientMocker FIXTURE_JSON_PUBLIC_DATA_2023_01_06 = "PVPC_DATA_2023_01_06.json" +FIXTURE_JSON_ESIOS_DATA_PVPC_2023_01_06 = "PRICES_ESIOS_1001_2023_01_06.json" def check_valid_state(state, tariff: str, value=None, key_attr=None): @@ -21,7 +22,7 @@ def check_valid_state(state, tariff: str, value=None, key_attr=None): ) try: _ = float(state.state) - # safety margins for current electricity price (it shouldn't be out of [0, 0.2]) + # safety margins for current electricity price (it shouldn't be out of [0, 0.5]) assert -0.1 < float(state.state) < 0.5 assert state.attributes[ATTR_TARIFF] == tariff except ValueError: @@ -41,20 +42,42 @@ def pvpc_aioclient_mock(aioclient_mock: AiohttpClientMocker): mask_url_public = ( "https://api.esios.ree.es/archives/70/download_json?locale=es&date={0}" ) - # new format for prices >= 2021-06-01 + mask_url_esios = ( + "https://api.esios.ree.es/indicators/1001" + 
"?start_date={0}T00:00&end_date={0}T23:59" + ) example_day = "2023-01-06" aioclient_mock.get( mask_url_public.format(example_day), text=load_fixture(f"{DOMAIN}/{FIXTURE_JSON_PUBLIC_DATA_2023_01_06}"), ) + aioclient_mock.get( + mask_url_esios.format(example_day), + text=load_fixture(f"{DOMAIN}/{FIXTURE_JSON_ESIOS_DATA_PVPC_2023_01_06}"), + ) + # simulate missing days aioclient_mock.get( mask_url_public.format("2023-01-07"), - status=HTTPStatus.BAD_GATEWAY, + status=HTTPStatus.OK, + text='{"message":"No values for specified archive"}', + ) + aioclient_mock.get( + mask_url_esios.format("2023-01-07"), + status=HTTPStatus.OK, text=( - '{"errors":[{"code":502,"status":"502","title":"Bad response from ESIOS",' - '"detail":"There are no data for the selected filters."}]}' + '{"indicator":{"name":"Término de facturación de energía activa del ' + 'PVPC 2.0TD","short_name":"PVPC T. 2.0TD","id":1001,"composited":false,' + '"step_type":"linear","disaggregated":true,"magnitud":' + '[{"name":"Precio","id":23}],"tiempo":[{"name":"Hora","id":4}],"geos":[],' + '"values_updated_at":null,"values":[]}}' ), ) + # simulate bad authentication + aioclient_mock.get( + mask_url_esios.format("2023-01-08"), + status=HTTPStatus.UNAUTHORIZED, + text="HTTP Token: Access denied.", + ) return aioclient_mock diff --git a/tests/components/pvpc_hourly_pricing/fixtures/PRICES_ESIOS_1001_2023_01_06.json b/tests/components/pvpc_hourly_pricing/fixtures/PRICES_ESIOS_1001_2023_01_06.json new file mode 100644 index 00000000000..20ad8af3696 --- /dev/null +++ b/tests/components/pvpc_hourly_pricing/fixtures/PRICES_ESIOS_1001_2023_01_06.json @@ -0,0 +1,1007 @@ +{ + "indicator": { + "name": "Término de facturación de energía activa del PVPC 2.0TD", + "short_name": "PVPC T. 
2.0TD", + "id": 1001, + "composited": false, + "step_type": "linear", + "disaggregated": true, + "magnitud": [ + { + "name": "Precio", + "id": 23 + } + ], + "tiempo": [ + { + "name": "Hora", + "id": 4 + } + ], + "geos": [ + { + "geo_id": 8741, + "geo_name": "Península" + }, + { + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "geo_id": 8745, + "geo_name": "Melilla" + } + ], + "values_updated_at": "2023-01-05T20:17:31.000+01:00", + "values": [ + { + "value": 159.69, + "datetime": "2023-01-06T00:00:00.000+01:00", + "datetime_utc": "2023-01-05T23:00:00Z", + "tz_time": "2023-01-05T23:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 159.69, + "datetime": "2023-01-06T00:00:00.000+01:00", + "datetime_utc": "2023-01-05T23:00:00Z", + "tz_time": "2023-01-05T23:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 159.69, + "datetime": "2023-01-06T00:00:00.000+01:00", + "datetime_utc": "2023-01-05T23:00:00Z", + "tz_time": "2023-01-05T23:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 159.69, + "datetime": "2023-01-06T00:00:00.000+01:00", + "datetime_utc": "2023-01-05T23:00:00Z", + "tz_time": "2023-01-05T23:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 159.69, + "datetime": "2023-01-06T00:00:00.000+01:00", + "datetime_utc": "2023-01-05T23:00:00Z", + "tz_time": "2023-01-05T23:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 155.71, + "datetime": "2023-01-06T01:00:00.000+01:00", + "datetime_utc": "2023-01-06T00:00:00Z", + "tz_time": "2023-01-06T00:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 155.71, + "datetime": "2023-01-06T01:00:00.000+01:00", + "datetime_utc": "2023-01-06T00:00:00Z", + "tz_time": "2023-01-06T00:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 155.71, + "datetime": 
"2023-01-06T01:00:00.000+01:00", + "datetime_utc": "2023-01-06T00:00:00Z", + "tz_time": "2023-01-06T00:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 155.71, + "datetime": "2023-01-06T01:00:00.000+01:00", + "datetime_utc": "2023-01-06T00:00:00Z", + "tz_time": "2023-01-06T00:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 155.71, + "datetime": "2023-01-06T01:00:00.000+01:00", + "datetime_utc": "2023-01-06T00:00:00Z", + "tz_time": "2023-01-06T00:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 154.41, + "datetime": "2023-01-06T02:00:00.000+01:00", + "datetime_utc": "2023-01-06T01:00:00Z", + "tz_time": "2023-01-06T01:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 154.41, + "datetime": "2023-01-06T02:00:00.000+01:00", + "datetime_utc": "2023-01-06T01:00:00Z", + "tz_time": "2023-01-06T01:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 154.41, + "datetime": "2023-01-06T02:00:00.000+01:00", + "datetime_utc": "2023-01-06T01:00:00Z", + "tz_time": "2023-01-06T01:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 154.41, + "datetime": "2023-01-06T02:00:00.000+01:00", + "datetime_utc": "2023-01-06T01:00:00Z", + "tz_time": "2023-01-06T01:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 154.41, + "datetime": "2023-01-06T02:00:00.000+01:00", + "datetime_utc": "2023-01-06T01:00:00Z", + "tz_time": "2023-01-06T01:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 139.37, + "datetime": "2023-01-06T03:00:00.000+01:00", + "datetime_utc": "2023-01-06T02:00:00Z", + "tz_time": "2023-01-06T02:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 139.37, + "datetime": "2023-01-06T03:00:00.000+01:00", + "datetime_utc": "2023-01-06T02:00:00Z", + "tz_time": "2023-01-06T02:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 139.37, + 
"datetime": "2023-01-06T03:00:00.000+01:00", + "datetime_utc": "2023-01-06T02:00:00Z", + "tz_time": "2023-01-06T02:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 139.37, + "datetime": "2023-01-06T03:00:00.000+01:00", + "datetime_utc": "2023-01-06T02:00:00Z", + "tz_time": "2023-01-06T02:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 139.37, + "datetime": "2023-01-06T03:00:00.000+01:00", + "datetime_utc": "2023-01-06T02:00:00Z", + "tz_time": "2023-01-06T02:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 134.02, + "datetime": "2023-01-06T04:00:00.000+01:00", + "datetime_utc": "2023-01-06T03:00:00Z", + "tz_time": "2023-01-06T03:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 134.02, + "datetime": "2023-01-06T04:00:00.000+01:00", + "datetime_utc": "2023-01-06T03:00:00Z", + "tz_time": "2023-01-06T03:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 134.02, + "datetime": "2023-01-06T04:00:00.000+01:00", + "datetime_utc": "2023-01-06T03:00:00Z", + "tz_time": "2023-01-06T03:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 134.02, + "datetime": "2023-01-06T04:00:00.000+01:00", + "datetime_utc": "2023-01-06T03:00:00Z", + "tz_time": "2023-01-06T03:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 134.02, + "datetime": "2023-01-06T04:00:00.000+01:00", + "datetime_utc": "2023-01-06T03:00:00Z", + "tz_time": "2023-01-06T03:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 140.02, + "datetime": "2023-01-06T05:00:00.000+01:00", + "datetime_utc": "2023-01-06T04:00:00Z", + "tz_time": "2023-01-06T04:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 140.02, + "datetime": "2023-01-06T05:00:00.000+01:00", + "datetime_utc": "2023-01-06T04:00:00Z", + "tz_time": "2023-01-06T04:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 
140.02, + "datetime": "2023-01-06T05:00:00.000+01:00", + "datetime_utc": "2023-01-06T04:00:00Z", + "tz_time": "2023-01-06T04:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 140.02, + "datetime": "2023-01-06T05:00:00.000+01:00", + "datetime_utc": "2023-01-06T04:00:00Z", + "tz_time": "2023-01-06T04:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 140.02, + "datetime": "2023-01-06T05:00:00.000+01:00", + "datetime_utc": "2023-01-06T04:00:00Z", + "tz_time": "2023-01-06T04:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 154.05, + "datetime": "2023-01-06T06:00:00.000+01:00", + "datetime_utc": "2023-01-06T05:00:00Z", + "tz_time": "2023-01-06T05:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 154.05, + "datetime": "2023-01-06T06:00:00.000+01:00", + "datetime_utc": "2023-01-06T05:00:00Z", + "tz_time": "2023-01-06T05:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 154.05, + "datetime": "2023-01-06T06:00:00.000+01:00", + "datetime_utc": "2023-01-06T05:00:00Z", + "tz_time": "2023-01-06T05:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 154.05, + "datetime": "2023-01-06T06:00:00.000+01:00", + "datetime_utc": "2023-01-06T05:00:00Z", + "tz_time": "2023-01-06T05:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 154.05, + "datetime": "2023-01-06T06:00:00.000+01:00", + "datetime_utc": "2023-01-06T05:00:00Z", + "tz_time": "2023-01-06T05:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 163.15, + "datetime": "2023-01-06T07:00:00.000+01:00", + "datetime_utc": "2023-01-06T06:00:00Z", + "tz_time": "2023-01-06T06:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 163.15, + "datetime": "2023-01-06T07:00:00.000+01:00", + "datetime_utc": "2023-01-06T06:00:00Z", + "tz_time": "2023-01-06T06:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + 
"value": 163.15, + "datetime": "2023-01-06T07:00:00.000+01:00", + "datetime_utc": "2023-01-06T06:00:00Z", + "tz_time": "2023-01-06T06:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 163.15, + "datetime": "2023-01-06T07:00:00.000+01:00", + "datetime_utc": "2023-01-06T06:00:00Z", + "tz_time": "2023-01-06T06:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 163.15, + "datetime": "2023-01-06T07:00:00.000+01:00", + "datetime_utc": "2023-01-06T06:00:00Z", + "tz_time": "2023-01-06T06:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 180.5, + "datetime": "2023-01-06T08:00:00.000+01:00", + "datetime_utc": "2023-01-06T07:00:00Z", + "tz_time": "2023-01-06T07:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 180.5, + "datetime": "2023-01-06T08:00:00.000+01:00", + "datetime_utc": "2023-01-06T07:00:00Z", + "tz_time": "2023-01-06T07:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 180.5, + "datetime": "2023-01-06T08:00:00.000+01:00", + "datetime_utc": "2023-01-06T07:00:00Z", + "tz_time": "2023-01-06T07:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 180.5, + "datetime": "2023-01-06T08:00:00.000+01:00", + "datetime_utc": "2023-01-06T07:00:00Z", + "tz_time": "2023-01-06T07:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 180.5, + "datetime": "2023-01-06T08:00:00.000+01:00", + "datetime_utc": "2023-01-06T07:00:00Z", + "tz_time": "2023-01-06T07:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 174.9, + "datetime": "2023-01-06T09:00:00.000+01:00", + "datetime_utc": "2023-01-06T08:00:00Z", + "tz_time": "2023-01-06T08:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 174.9, + "datetime": "2023-01-06T09:00:00.000+01:00", + "datetime_utc": "2023-01-06T08:00:00Z", + "tz_time": "2023-01-06T08:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + 
"value": 174.9, + "datetime": "2023-01-06T09:00:00.000+01:00", + "datetime_utc": "2023-01-06T08:00:00Z", + "tz_time": "2023-01-06T08:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 174.9, + "datetime": "2023-01-06T09:00:00.000+01:00", + "datetime_utc": "2023-01-06T08:00:00Z", + "tz_time": "2023-01-06T08:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 174.9, + "datetime": "2023-01-06T09:00:00.000+01:00", + "datetime_utc": "2023-01-06T08:00:00Z", + "tz_time": "2023-01-06T08:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 166.47, + "datetime": "2023-01-06T10:00:00.000+01:00", + "datetime_utc": "2023-01-06T09:00:00Z", + "tz_time": "2023-01-06T09:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 166.47, + "datetime": "2023-01-06T10:00:00.000+01:00", + "datetime_utc": "2023-01-06T09:00:00Z", + "tz_time": "2023-01-06T09:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 166.47, + "datetime": "2023-01-06T10:00:00.000+01:00", + "datetime_utc": "2023-01-06T09:00:00Z", + "tz_time": "2023-01-06T09:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 166.47, + "datetime": "2023-01-06T10:00:00.000+01:00", + "datetime_utc": "2023-01-06T09:00:00Z", + "tz_time": "2023-01-06T09:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 166.47, + "datetime": "2023-01-06T10:00:00.000+01:00", + "datetime_utc": "2023-01-06T09:00:00Z", + "tz_time": "2023-01-06T09:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 152.3, + "datetime": "2023-01-06T11:00:00.000+01:00", + "datetime_utc": "2023-01-06T10:00:00Z", + "tz_time": "2023-01-06T10:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 152.3, + "datetime": "2023-01-06T11:00:00.000+01:00", + "datetime_utc": "2023-01-06T10:00:00Z", + "tz_time": "2023-01-06T10:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + 
"value": 152.3, + "datetime": "2023-01-06T11:00:00.000+01:00", + "datetime_utc": "2023-01-06T10:00:00Z", + "tz_time": "2023-01-06T10:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 152.3, + "datetime": "2023-01-06T11:00:00.000+01:00", + "datetime_utc": "2023-01-06T10:00:00Z", + "tz_time": "2023-01-06T10:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 152.3, + "datetime": "2023-01-06T11:00:00.000+01:00", + "datetime_utc": "2023-01-06T10:00:00Z", + "tz_time": "2023-01-06T10:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 144.54, + "datetime": "2023-01-06T12:00:00.000+01:00", + "datetime_utc": "2023-01-06T11:00:00Z", + "tz_time": "2023-01-06T11:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 144.54, + "datetime": "2023-01-06T12:00:00.000+01:00", + "datetime_utc": "2023-01-06T11:00:00Z", + "tz_time": "2023-01-06T11:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 144.54, + "datetime": "2023-01-06T12:00:00.000+01:00", + "datetime_utc": "2023-01-06T11:00:00Z", + "tz_time": "2023-01-06T11:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 144.54, + "datetime": "2023-01-06T12:00:00.000+01:00", + "datetime_utc": "2023-01-06T11:00:00Z", + "tz_time": "2023-01-06T11:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 144.54, + "datetime": "2023-01-06T12:00:00.000+01:00", + "datetime_utc": "2023-01-06T11:00:00Z", + "tz_time": "2023-01-06T11:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 132.08, + "datetime": "2023-01-06T13:00:00.000+01:00", + "datetime_utc": "2023-01-06T12:00:00Z", + "tz_time": "2023-01-06T12:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 132.08, + "datetime": "2023-01-06T13:00:00.000+01:00", + "datetime_utc": "2023-01-06T12:00:00Z", + "tz_time": "2023-01-06T12:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { 
+ "value": 132.08, + "datetime": "2023-01-06T13:00:00.000+01:00", + "datetime_utc": "2023-01-06T12:00:00Z", + "tz_time": "2023-01-06T12:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 132.08, + "datetime": "2023-01-06T13:00:00.000+01:00", + "datetime_utc": "2023-01-06T12:00:00Z", + "tz_time": "2023-01-06T12:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 132.08, + "datetime": "2023-01-06T13:00:00.000+01:00", + "datetime_utc": "2023-01-06T12:00:00Z", + "tz_time": "2023-01-06T12:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 119.6, + "datetime": "2023-01-06T14:00:00.000+01:00", + "datetime_utc": "2023-01-06T13:00:00Z", + "tz_time": "2023-01-06T13:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 119.6, + "datetime": "2023-01-06T14:00:00.000+01:00", + "datetime_utc": "2023-01-06T13:00:00Z", + "tz_time": "2023-01-06T13:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 119.6, + "datetime": "2023-01-06T14:00:00.000+01:00", + "datetime_utc": "2023-01-06T13:00:00Z", + "tz_time": "2023-01-06T13:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 119.6, + "datetime": "2023-01-06T14:00:00.000+01:00", + "datetime_utc": "2023-01-06T13:00:00Z", + "tz_time": "2023-01-06T13:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 119.6, + "datetime": "2023-01-06T14:00:00.000+01:00", + "datetime_utc": "2023-01-06T13:00:00Z", + "tz_time": "2023-01-06T13:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 108.74, + "datetime": "2023-01-06T15:00:00.000+01:00", + "datetime_utc": "2023-01-06T14:00:00Z", + "tz_time": "2023-01-06T14:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 108.74, + "datetime": "2023-01-06T15:00:00.000+01:00", + "datetime_utc": "2023-01-06T14:00:00Z", + "tz_time": "2023-01-06T14:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { 
+ "value": 108.74, + "datetime": "2023-01-06T15:00:00.000+01:00", + "datetime_utc": "2023-01-06T14:00:00Z", + "tz_time": "2023-01-06T14:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 108.74, + "datetime": "2023-01-06T15:00:00.000+01:00", + "datetime_utc": "2023-01-06T14:00:00Z", + "tz_time": "2023-01-06T14:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 108.74, + "datetime": "2023-01-06T15:00:00.000+01:00", + "datetime_utc": "2023-01-06T14:00:00Z", + "tz_time": "2023-01-06T14:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 123.79, + "datetime": "2023-01-06T16:00:00.000+01:00", + "datetime_utc": "2023-01-06T15:00:00Z", + "tz_time": "2023-01-06T15:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 123.79, + "datetime": "2023-01-06T16:00:00.000+01:00", + "datetime_utc": "2023-01-06T15:00:00Z", + "tz_time": "2023-01-06T15:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 123.79, + "datetime": "2023-01-06T16:00:00.000+01:00", + "datetime_utc": "2023-01-06T15:00:00Z", + "tz_time": "2023-01-06T15:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 123.79, + "datetime": "2023-01-06T16:00:00.000+01:00", + "datetime_utc": "2023-01-06T15:00:00Z", + "tz_time": "2023-01-06T15:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 123.79, + "datetime": "2023-01-06T16:00:00.000+01:00", + "datetime_utc": "2023-01-06T15:00:00Z", + "tz_time": "2023-01-06T15:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 166.41, + "datetime": "2023-01-06T17:00:00.000+01:00", + "datetime_utc": "2023-01-06T16:00:00Z", + "tz_time": "2023-01-06T16:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 166.41, + "datetime": "2023-01-06T17:00:00.000+01:00", + "datetime_utc": "2023-01-06T16:00:00Z", + "tz_time": "2023-01-06T16:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, 
+ { + "value": 166.41, + "datetime": "2023-01-06T17:00:00.000+01:00", + "datetime_utc": "2023-01-06T16:00:00Z", + "tz_time": "2023-01-06T16:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 166.41, + "datetime": "2023-01-06T17:00:00.000+01:00", + "datetime_utc": "2023-01-06T16:00:00Z", + "tz_time": "2023-01-06T16:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 166.41, + "datetime": "2023-01-06T17:00:00.000+01:00", + "datetime_utc": "2023-01-06T16:00:00Z", + "tz_time": "2023-01-06T16:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 173.49, + "datetime": "2023-01-06T18:00:00.000+01:00", + "datetime_utc": "2023-01-06T17:00:00Z", + "tz_time": "2023-01-06T17:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 173.49, + "datetime": "2023-01-06T18:00:00.000+01:00", + "datetime_utc": "2023-01-06T17:00:00Z", + "tz_time": "2023-01-06T17:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 173.49, + "datetime": "2023-01-06T18:00:00.000+01:00", + "datetime_utc": "2023-01-06T17:00:00Z", + "tz_time": "2023-01-06T17:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 173.49, + "datetime": "2023-01-06T18:00:00.000+01:00", + "datetime_utc": "2023-01-06T17:00:00Z", + "tz_time": "2023-01-06T17:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 173.49, + "datetime": "2023-01-06T18:00:00.000+01:00", + "datetime_utc": "2023-01-06T17:00:00Z", + "tz_time": "2023-01-06T17:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 186.17, + "datetime": "2023-01-06T19:00:00.000+01:00", + "datetime_utc": "2023-01-06T18:00:00Z", + "tz_time": "2023-01-06T18:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 186.17, + "datetime": "2023-01-06T19:00:00.000+01:00", + "datetime_utc": "2023-01-06T18:00:00Z", + "tz_time": "2023-01-06T18:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" 
+ }, + { + "value": 186.17, + "datetime": "2023-01-06T19:00:00.000+01:00", + "datetime_utc": "2023-01-06T18:00:00Z", + "tz_time": "2023-01-06T18:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 186.17, + "datetime": "2023-01-06T19:00:00.000+01:00", + "datetime_utc": "2023-01-06T18:00:00Z", + "tz_time": "2023-01-06T18:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 186.17, + "datetime": "2023-01-06T19:00:00.000+01:00", + "datetime_utc": "2023-01-06T18:00:00Z", + "tz_time": "2023-01-06T18:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 186.11, + "datetime": "2023-01-06T20:00:00.000+01:00", + "datetime_utc": "2023-01-06T19:00:00Z", + "tz_time": "2023-01-06T19:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 186.11, + "datetime": "2023-01-06T20:00:00.000+01:00", + "datetime_utc": "2023-01-06T19:00:00Z", + "tz_time": "2023-01-06T19:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 186.11, + "datetime": "2023-01-06T20:00:00.000+01:00", + "datetime_utc": "2023-01-06T19:00:00Z", + "tz_time": "2023-01-06T19:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 186.11, + "datetime": "2023-01-06T20:00:00.000+01:00", + "datetime_utc": "2023-01-06T19:00:00Z", + "tz_time": "2023-01-06T19:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 186.11, + "datetime": "2023-01-06T20:00:00.000+01:00", + "datetime_utc": "2023-01-06T19:00:00Z", + "tz_time": "2023-01-06T19:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 178.45, + "datetime": "2023-01-06T21:00:00.000+01:00", + "datetime_utc": "2023-01-06T20:00:00Z", + "tz_time": "2023-01-06T20:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 178.45, + "datetime": "2023-01-06T21:00:00.000+01:00", + "datetime_utc": "2023-01-06T20:00:00Z", + "tz_time": "2023-01-06T20:00:00.000Z", + "geo_id": 8742, + "geo_name": 
"Canarias" + }, + { + "value": 178.45, + "datetime": "2023-01-06T21:00:00.000+01:00", + "datetime_utc": "2023-01-06T20:00:00Z", + "tz_time": "2023-01-06T20:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 178.45, + "datetime": "2023-01-06T21:00:00.000+01:00", + "datetime_utc": "2023-01-06T20:00:00Z", + "tz_time": "2023-01-06T20:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 178.45, + "datetime": "2023-01-06T21:00:00.000+01:00", + "datetime_utc": "2023-01-06T20:00:00Z", + "tz_time": "2023-01-06T20:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 139.37, + "datetime": "2023-01-06T22:00:00.000+01:00", + "datetime_utc": "2023-01-06T21:00:00Z", + "tz_time": "2023-01-06T21:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 139.37, + "datetime": "2023-01-06T22:00:00.000+01:00", + "datetime_utc": "2023-01-06T21:00:00Z", + "tz_time": "2023-01-06T21:00:00.000Z", + "geo_id": 8742, + "geo_name": "Canarias" + }, + { + "value": 139.37, + "datetime": "2023-01-06T22:00:00.000+01:00", + "datetime_utc": "2023-01-06T21:00:00Z", + "tz_time": "2023-01-06T21:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 139.37, + "datetime": "2023-01-06T22:00:00.000+01:00", + "datetime_utc": "2023-01-06T21:00:00Z", + "tz_time": "2023-01-06T21:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 139.37, + "datetime": "2023-01-06T22:00:00.000+01:00", + "datetime_utc": "2023-01-06T21:00:00Z", + "tz_time": "2023-01-06T21:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + }, + { + "value": 129.35, + "datetime": "2023-01-06T23:00:00.000+01:00", + "datetime_utc": "2023-01-06T22:00:00Z", + "tz_time": "2023-01-06T22:00:00.000Z", + "geo_id": 8741, + "geo_name": "Península" + }, + { + "value": 129.35, + "datetime": "2023-01-06T23:00:00.000+01:00", + "datetime_utc": "2023-01-06T22:00:00Z", + "tz_time": "2023-01-06T22:00:00.000Z", + "geo_id": 8742, + 
"geo_name": "Canarias" + }, + { + "value": 129.35, + "datetime": "2023-01-06T23:00:00.000+01:00", + "datetime_utc": "2023-01-06T22:00:00Z", + "tz_time": "2023-01-06T22:00:00.000Z", + "geo_id": 8743, + "geo_name": "Baleares" + }, + { + "value": 129.35, + "datetime": "2023-01-06T23:00:00.000+01:00", + "datetime_utc": "2023-01-06T22:00:00Z", + "tz_time": "2023-01-06T22:00:00.000Z", + "geo_id": 8744, + "geo_name": "Ceuta" + }, + { + "value": 129.35, + "datetime": "2023-01-06T23:00:00.000+01:00", + "datetime_utc": "2023-01-06T22:00:00Z", + "tz_time": "2023-01-06T22:00:00.000Z", + "geo_id": 8745, + "geo_name": "Melilla" + } + ] + } +} diff --git a/tests/components/pvpc_hourly_pricing/test_config_flow.py b/tests/components/pvpc_hourly_pricing/test_config_flow.py index 6560c81ebbb..950aea8e32c 100644 --- a/tests/components/pvpc_hourly_pricing/test_config_flow.py +++ b/tests/components/pvpc_hourly_pricing/test_config_flow.py @@ -4,14 +4,15 @@ from datetime import datetime, timedelta from freezegun.api import FrozenDateTimeFactory from homeassistant import config_entries, data_entry_flow -from homeassistant.components.pvpc_hourly_pricing import ( +from homeassistant.components.pvpc_hourly_pricing.const import ( ATTR_POWER, ATTR_POWER_P3, ATTR_TARIFF, + CONF_USE_API_TOKEN, DOMAIN, TARIFFS, ) -from homeassistant.const import CONF_NAME +from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -22,6 +23,7 @@ from tests.common import async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker _MOCK_TIME_VALID_RESPONSES = datetime(2023, 1, 6, 12, 0, tzinfo=dt_util.UTC) +_MOCK_TIME_BAD_AUTH_RESPONSES = datetime(2023, 1, 8, 12, 0, tzinfo=dt_util.UTC) async def test_config_flow( @@ -35,7 +37,7 @@ async def test_config_flow( - Check state and attributes - Check abort when trying to config another with same tariff - 
Check removal and add again to check state restoration - - Configure options to change power and tariff to "2.0TD" + - Configure options to introduce API Token, with bad auth and good one """ freezer.move_to(_MOCK_TIME_VALID_RESPONSES) hass.config.set_time_zone("Europe/Madrid") @@ -44,6 +46,7 @@ async def test_config_flow( ATTR_TARIFF: TARIFFS[1], ATTR_POWER: 4.6, ATTR_POWER_P3: 5.75, + CONF_USE_API_TOKEN: False, } result = await hass.config_entries.flow.async_init( @@ -107,8 +110,17 @@ async def test_config_flow( result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={ATTR_POWER: 3.0, ATTR_POWER_P3: 4.6}, + user_input={ATTR_POWER: 3.0, ATTR_POWER_P3: 4.6, CONF_USE_API_TOKEN: True}, ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "api_token" + assert pvpc_aioclient_mock.call_count == 2 + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "good-token"} + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert pvpc_aioclient_mock.call_count == 2 await hass.async_block_till_done() state = hass.states.get("sensor.esios_pvpc") check_valid_state(state, tariff=TARIFFS[1]) @@ -125,3 +137,96 @@ async def test_config_flow( check_valid_state(state, tariff=TARIFFS[0], value="unavailable") assert "period" not in state.attributes assert pvpc_aioclient_mock.call_count == 4 + + # disable api token in options + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ATTR_POWER: 3.0, ATTR_POWER_P3: 4.6, CONF_USE_API_TOKEN: False}, + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert pvpc_aioclient_mock.call_count == 4 + await hass.async_block_till_done() + + +async def 
test_reauth( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + pvpc_aioclient_mock: AiohttpClientMocker, +) -> None: + """Test reauth flow for API-token mode.""" + freezer.move_to(_MOCK_TIME_BAD_AUTH_RESPONSES) + hass.config.set_time_zone("Europe/Madrid") + tst_config = { + CONF_NAME: "test", + ATTR_TARIFF: TARIFFS[1], + ATTR_POWER: 4.6, + ATTR_POWER_P3: 5.75, + CONF_USE_API_TOKEN: True, + } + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], tst_config + ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "api_token" + assert pvpc_aioclient_mock.call_count == 0 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "bad-token"} + ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "api_token" + assert result["errors"]["base"] == "invalid_auth" + assert pvpc_aioclient_mock.call_count == 1 + + freezer.move_to(_MOCK_TIME_VALID_RESPONSES) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "good-token"} + ) + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert pvpc_aioclient_mock.call_count == 3 + + # check reauth trigger with bad-auth responses + freezer.move_to(_MOCK_TIME_BAD_AUTH_RESPONSES) + async_fire_time_changed(hass, _MOCK_TIME_BAD_AUTH_RESPONSES) + await hass.async_block_till_done() + assert pvpc_aioclient_mock.call_count == 4 + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["context"]["entry_id"] == config_entry.entry_id + assert result["context"]["source"] == config_entries.SOURCE_REAUTH + assert result["step_id"] == "reauth_confirm" + + result = 
await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "bad-token"} + ) + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert pvpc_aioclient_mock.call_count == 5 + + result = hass.config_entries.flow.async_progress_by_handler(DOMAIN)[0] + assert result["context"]["entry_id"] == config_entry.entry_id + assert result["context"]["source"] == config_entries.SOURCE_REAUTH + assert result["step_id"] == "reauth_confirm" + + freezer.move_to(_MOCK_TIME_VALID_RESPONSES) + async_fire_time_changed(hass, _MOCK_TIME_VALID_RESPONSES) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "good-token"} + ) + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert pvpc_aioclient_mock.call_count == 6 + + await hass.async_block_till_done() + assert pvpc_aioclient_mock.call_count == 7 diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index 9326869b272..4744c065ede 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -367,7 +367,7 @@ async def test_service_descriptions(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.python_script.os.path.exists", return_value=True ), patch_yaml_files( - services_yaml1 + services_yaml1, ): await async_setup_component(hass, DOMAIN, {}) @@ -416,7 +416,7 @@ async def test_service_descriptions(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.python_script.os.path.exists", return_value=True ), patch_yaml_files( - services_yaml2 + services_yaml2, ): await hass.services.async_call(DOMAIN, "reload", {}, blocking=True) descriptions = await async_get_all_descriptions(hass) diff --git a/tests/components/rainbird/test_calendar.py b/tests/components/rainbird/test_calendar.py index 
04e423a399c..922ec7b0a5a 100644 --- a/tests/components/rainbird/test_calendar.py +++ b/tests/components/rainbird/test_calendar.py @@ -232,7 +232,8 @@ async def test_calendar_not_supported_by_device( @pytest.mark.parametrize( - "mock_insert_schedule_response", [([None])] # Disable success responses + "mock_insert_schedule_response", + [([None])], # Disable success responses ) async def test_no_schedule( hass: HomeAssistant, diff --git a/tests/components/rainbird/test_init.py b/tests/components/rainbird/test_init.py index db9c4c8739e..00cbefc6556 100644 --- a/tests/components/rainbird/test_init.py +++ b/tests/components/rainbird/test_init.py @@ -3,6 +3,7 @@ from __future__ import annotations from http import HTTPStatus +from typing import Any import pytest @@ -10,7 +11,7 @@ from homeassistant.components.rainbird.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_MAC from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import ( CONFIG_ENTRY_DATA, @@ -35,7 +36,7 @@ async def test_init_success( ) -> None: """Test successful setup and unload.""" - await config_entry.async_setup(hass) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == ConfigEntryState.LOADED await hass.config_entries.async_unload(config_entry.entry_id) @@ -86,7 +87,7 @@ async def test_communication_failure( config_entry_state: list[ConfigEntryState], ) -> None: """Test unable to talk to device on startup, which fails setup.""" - await config_entry.async_setup(hass) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == config_entry_state @@ -115,7 +116,7 @@ async def test_fix_unique_id( assert entries[0].unique_id is None assert entries[0].data.get(CONF_MAC) is None - await config_entry.async_setup(hass) + await 
hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == ConfigEntryState.LOADED # Verify config entry now has a unique id @@ -167,7 +168,7 @@ async def test_fix_unique_id_failure( responses.insert(0, initial_response) - await config_entry.async_setup(hass) + await hass.config_entries.async_setup(config_entry.entry_id) # Config entry is loaded, but not updated assert config_entry.state == ConfigEntryState.LOADED assert config_entry.unique_id is None @@ -202,14 +203,10 @@ async def test_fix_unique_id_duplicate( responses.append(mock_json_response(WIFI_PARAMS_RESPONSE)) responses.extend(responses_copy) - await config_entry.async_setup(hass) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == ConfigEntryState.LOADED assert config_entry.unique_id == MAC_ADDRESS_UNIQUE_ID - await other_entry.async_setup(hass) - # Config entry unique id could not be updated since it already exists - assert other_entry.state == ConfigEntryState.SETUP_ERROR - assert "Unable to fix missing unique id (already exists)" in caplog.text await hass.async_block_till_done() @@ -221,39 +218,65 @@ async def test_fix_unique_id_duplicate( "config_entry_unique_id", "serial_number", "entity_unique_id", + "device_identifier", "expected_unique_id", + "expected_device_identifier", ), [ - (SERIAL_NUMBER, SERIAL_NUMBER, SERIAL_NUMBER, MAC_ADDRESS_UNIQUE_ID), + ( + SERIAL_NUMBER, + SERIAL_NUMBER, + SERIAL_NUMBER, + str(SERIAL_NUMBER), + MAC_ADDRESS_UNIQUE_ID, + MAC_ADDRESS_UNIQUE_ID, + ), ( SERIAL_NUMBER, SERIAL_NUMBER, f"{SERIAL_NUMBER}-rain-delay", + f"{SERIAL_NUMBER}-1", f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay", + f"{MAC_ADDRESS_UNIQUE_ID}-1", ), - ("0", 0, "0", MAC_ADDRESS_UNIQUE_ID), + ( + SERIAL_NUMBER, + SERIAL_NUMBER, + SERIAL_NUMBER, + SERIAL_NUMBER, + MAC_ADDRESS_UNIQUE_ID, + MAC_ADDRESS_UNIQUE_ID, + ), + ("0", 0, "0", "0", MAC_ADDRESS_UNIQUE_ID, MAC_ADDRESS_UNIQUE_ID), ( "0", 0, "0-rain-delay", + "0-1", 
f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay", + f"{MAC_ADDRESS_UNIQUE_ID}-1", ), ( MAC_ADDRESS_UNIQUE_ID, SERIAL_NUMBER, MAC_ADDRESS_UNIQUE_ID, MAC_ADDRESS_UNIQUE_ID, + MAC_ADDRESS_UNIQUE_ID, + MAC_ADDRESS_UNIQUE_ID, ), ( MAC_ADDRESS_UNIQUE_ID, SERIAL_NUMBER, f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay", + f"{MAC_ADDRESS_UNIQUE_ID}-1", f"{MAC_ADDRESS_UNIQUE_ID}-rain-delay", + f"{MAC_ADDRESS_UNIQUE_ID}-1", ), ], ids=( "serial-number", "serial-number-with-suffix", + "serial-number-int", "zero-serial", "zero-serial-suffix", "new-format", @@ -264,18 +287,150 @@ async def test_fix_entity_unique_ids( hass: HomeAssistant, config_entry: MockConfigEntry, entity_unique_id: str, + device_identifier: str, expected_unique_id: str, + expected_device_identifier: str, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, ) -> None: """Test fixing entity unique ids from old unique id formats.""" - entity_registry = er.async_get(hass) entity_entry = entity_registry.async_get_or_create( DOMAIN, "number", unique_id=entity_unique_id, config_entry=config_entry ) + device_entry = device_registry.async_get_or_create( + identifiers={(DOMAIN, device_identifier)}, + config_entry_id=config_entry.entry_id, + serial_number=config_entry.data["serial_number"], + ) - await config_entry.async_setup(hass) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == ConfigEntryState.LOADED entity_entry = entity_registry.async_get(entity_entry.id) assert entity_entry assert entity_entry.unique_id == expected_unique_id + + device_entry = device_registry.async_get_device( + {(DOMAIN, expected_device_identifier)} + ) + assert device_entry + assert device_entry.identifiers == {(DOMAIN, expected_device_identifier)} + + +@pytest.mark.parametrize( + ( + "entry1_updates", + "entry2_updates", + "expected_device_name", + "expected_disabled_by", + ), + [ + ({}, {}, None, None), + ( + { + "name_by_user": "Front Sprinkler", + }, + {}, + "Front Sprinkler", + None, + ), + ( + 
{}, + { + "name_by_user": "Front Sprinkler", + }, + "Front Sprinkler", + None, + ), + ( + { + "name_by_user": "Sprinkler 1", + }, + { + "name_by_user": "Sprinkler 2", + }, + "Sprinkler 2", + None, + ), + ( + { + "disabled_by": dr.DeviceEntryDisabler.USER, + }, + {}, + None, + None, + ), + ( + {}, + { + "disabled_by": dr.DeviceEntryDisabler.USER, + }, + None, + None, + ), + ( + { + "disabled_by": dr.DeviceEntryDisabler.USER, + }, + { + "disabled_by": dr.DeviceEntryDisabler.USER, + }, + None, + dr.DeviceEntryDisabler.USER, + ), + ], + ids=[ + "duplicates", + "prefer-old-name", + "prefer-new-name", + "both-names-prefers-new", + "old-disabled-prefer-new", + "new-disabled-prefer-old", + "both-disabled", + ], +) +async def test_fix_duplicate_device_ids( + hass: HomeAssistant, + config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + entry1_updates: dict[str, Any], + entry2_updates: dict[str, Any], + expected_device_name: str | None, + expected_disabled_by: dr.DeviceEntryDisabler | None, +) -> None: + """Test fixing duplicate device ids.""" + + entry1 = device_registry.async_get_or_create( + identifiers={(DOMAIN, str(SERIAL_NUMBER))}, + config_entry_id=config_entry.entry_id, + serial_number=config_entry.data["serial_number"], + ) + device_registry.async_update_device(entry1.id, **entry1_updates) + + entry2 = device_registry.async_get_or_create( + identifiers={(DOMAIN, MAC_ADDRESS_UNIQUE_ID)}, + config_entry_id=config_entry.entry_id, + serial_number=config_entry.data["serial_number"], + ) + device_registry.async_update_device(entry2.id, **entry2_updates) + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + assert len(device_entries) == 2 + + await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.state == ConfigEntryState.LOADED + + # Only the device with the new format exists + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + assert 
len(device_entries) == 1 + + device_entry = device_registry.async_get_device({(DOMAIN, MAC_ADDRESS_UNIQUE_ID)}) + assert device_entry + assert device_entry.identifiers == {(DOMAIN, MAC_ADDRESS_UNIQUE_ID)} + assert device_entry.name_by_user == expected_device_name + assert device_entry.disabled_by == expected_disabled_by diff --git a/tests/components/rainmachine/conftest.py b/tests/components/rainmachine/conftest.py index 685f307d197..2697e908c94 100644 --- a/tests/components/rainmachine/conftest.py +++ b/tests/components/rainmachine/conftest.py @@ -134,7 +134,8 @@ async def setup_rainmachine_fixture(hass, client, config): ), patch( "homeassistant.components.rainmachine.config_flow.Client", return_value=client ), patch( - "homeassistant.components.rainmachine.PLATFORMS", [] + "homeassistant.components.rainmachine.PLATFORMS", + [], ): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index a982eeb39be..d0ed6f15d43 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -412,17 +412,11 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object( core, "EventTypes", old_db_schema.EventTypes - ), patch.object( - core, "EventData", old_db_schema.EventData - ), patch.object( + ), patch.object(core, "EventData", old_db_schema.EventData), patch.object( core, "States", old_db_schema.States - ), patch.object( - core, "Events", old_db_schema.Events - ), patch.object( + ), patch.object(core, "Events", old_db_schema.Events), patch.object( core, "StateAttributes", old_db_schema.StateAttributes - ), patch.object( - core, "EntityIDMigrationTask", core.RecorderTask - ), patch( + ), patch.object(core, "EntityIDMigrationTask", core.RecorderTask), patch( 
CREATE_ENGINE_TARGET, new=partial( create_engine_test_for_schema_version_postfix, diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 852419559b2..b9d0801d788 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -85,17 +85,11 @@ def db_schema_32(): recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object( core, "EventTypes", old_db_schema.EventTypes - ), patch.object( - core, "EventData", old_db_schema.EventData - ), patch.object( + ), patch.object(core, "EventData", old_db_schema.EventData), patch.object( core, "States", old_db_schema.States - ), patch.object( - core, "Events", old_db_schema.Events - ), patch.object( + ), patch.object(core, "Events", old_db_schema.Events), patch.object( core, "StateAttributes", old_db_schema.StateAttributes - ), patch.object( - core, "EntityIDMigrationTask", core.RecorderTask - ), patch( + ), patch.object(core, "EntityIDMigrationTask", core.RecorderTask), patch( CREATE_ENGINE_TARGET, new=_create_engine_test ): yield diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 4faa8dc7e8a..1696c9018b4 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -244,9 +244,7 @@ async def test_purge_old_states_encounters_temporary_mysql_error( ) as sleep_mock, patch( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], - ), patch.object( - instance.engine.dialect, "name", "mysql" - ): + ), patch.object(instance.engine.dialect, "name", "mysql"): await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) diff --git 
a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index f386fd19e36..e8f9130165f 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -212,9 +212,7 @@ async def test_purge_old_states_encounters_temporary_mysql_error( ) as sleep_mock, patch( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], - ), patch.object( - instance.engine.dialect, "name", "mysql" - ): + ), patch.object(instance.engine.dialect, "name", "mysql"): await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index a7b15a7f12d..0a30895adc9 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -25,6 +25,7 @@ from homeassistant.components.recorder.models import ( process_timestamp, ) from homeassistant.components.recorder.util import ( + chunked_or_all, end_incomplete_runs, is_second_sunday, resolve_period, @@ -1023,3 +1024,24 @@ async def test_resolve_period(hass: HomeAssistant) -> None: } } ) == (now - timedelta(hours=1, minutes=25), now - timedelta(minutes=25)) + + +def test_chunked_or_all(): + """Test chunked_or_all can iterate chunk sizes larger than the passed in collection.""" + all = [] + incoming = (1, 2, 3, 4) + for chunk in chunked_or_all(incoming, 2): + assert len(chunk) == 2 + all.extend(chunk) + assert all == [1, 2, 3, 4] + + all = [] + incoming = (1, 2, 3, 4) + for chunk in chunked_or_all(incoming, 5): + assert len(chunk) == 4 + # Verify the chunk is the same object as the incoming + # collection since we want to avoid copying the collection + # if we don't need to + assert chunk is incoming + all.extend(chunk) + assert all == [1, 2, 3, 4] diff --git 
a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 98f401e45d8..b11cc67707f 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -98,13 +98,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object( core, "EventTypes", old_db_schema.EventTypes - ), patch.object( - core, "EventData", old_db_schema.EventData - ), patch.object( + ), patch.object(core, "EventData", old_db_schema.EventData), patch.object( core, "States", old_db_schema.States - ), patch.object( - core, "Events", old_db_schema.Events - ), patch( + ), patch.object(core, "Events", old_db_schema.Events), patch( CREATE_ENGINE_TARGET, new=_create_engine_test ), patch( "homeassistant.components.recorder.Recorder._migrate_events_context_ids", @@ -269,13 +265,9 @@ async def test_migrate_can_resume_entity_id_post_migration( recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object( core, "EventTypes", old_db_schema.EventTypes - ), patch.object( - core, "EventData", old_db_schema.EventData - ), patch.object( + ), patch.object(core, "EventData", old_db_schema.EventData), patch.object( core, "States", old_db_schema.States - ), patch.object( - core, "Events", old_db_schema.Events - ), patch( + ), patch.object(core, "Events", old_db_schema.Events), patch( CREATE_ENGINE_TARGET, new=_create_engine_test ), patch( "homeassistant.components.recorder.Recorder._migrate_events_context_ids", diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index b371d69fe5f..323b81211d7 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -2227,9 
+2227,7 @@ async def test_recorder_info_migration_queue_exhausted( ), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, - ), patch.object( - recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1 - ), patch.object( + ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), patch.object( recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0 ), patch( "homeassistant.components.recorder.migration._apply_update", diff --git a/tests/components/remote/test_significant_change.py b/tests/components/remote/test_significant_change.py new file mode 100644 index 00000000000..dcbfce213d6 --- /dev/null +++ b/tests/components/remote/test_significant_change.py @@ -0,0 +1,62 @@ +"""Test the Remote significant change platform.""" +from homeassistant.components.remote import ATTR_ACTIVITY_LIST, ATTR_CURRENT_ACTIVITY +from homeassistant.components.remote.significant_change import ( + async_check_significant_change, +) + + +async def test_significant_change() -> None: + """Detect Remote significant changes.""" + # no change at all + attrs = { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + } + assert not async_check_significant_change(None, "on", attrs, "on", attrs) + + # change of state is significant + assert async_check_significant_change(None, "on", attrs, "off", attrs) + + # change of current activity is significant + attrs = { + "old": { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + }, + "new": { + ATTR_CURRENT_ACTIVITY: "paused", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + }, + } + assert async_check_significant_change(None, "on", attrs["old"], "on", attrs["new"]) + + # change of list of possible activities is not significant + attrs = { + "old": { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + }, + "new": { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing"], + }, + } + assert not 
async_check_significant_change( + None, "on", attrs["old"], "on", attrs["new"] + ) + + # change of any not official attribute is not significant + attrs = { + "old": { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + }, + "new": { + ATTR_CURRENT_ACTIVITY: "playing", + ATTR_ACTIVITY_LIST: ["playing", "paused"], + "not_official": "changed", + }, + } + assert not async_check_significant_change( + None, "on", attrs["old"], "on", attrs["new"] + ) diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 3efc1e481df..464d4120c65 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -19,9 +19,14 @@ TEST_USERNAME2 = "username" TEST_PASSWORD = "password" TEST_PASSWORD2 = "new_password" TEST_MAC = "ab:cd:ef:gh:ij:kl" +TEST_MAC2 = "12:34:56:78:9a:bc" +TEST_UID = "ABC1234567D89EFG" TEST_PORT = 1234 TEST_NVR_NAME = "test_reolink_name" +TEST_NVR_NAME2 = "test2_reolink_name" TEST_USE_HTTPS = True +TEST_HOST_MODEL = "RLN8-410" +TEST_CAM_MODEL = "RLC-123" @pytest.fixture @@ -51,6 +56,7 @@ def reolink_connect_class( host_mock.unsubscribe.return_value = True host_mock.logout.return_value = True host_mock.mac_address = TEST_MAC + host_mock.uid = TEST_UID host_mock.onvif_enabled = True host_mock.rtmp_enabled = True host_mock.rtsp_enabled = True @@ -59,14 +65,30 @@ def reolink_connect_class( host_mock.use_https = TEST_USE_HTTPS host_mock.is_admin = True host_mock.user_level = "admin" + host_mock.protocol = "rtsp" + host_mock.channels = [0] + host_mock.stream_channels = [0] host_mock.sw_version_update_required = False host_mock.hardware_version = "IPC_00000" host_mock.sw_version = "v1.0.0.0.0.0000" host_mock.manufacturer = "Reolink" - host_mock.model = "RLC-123" + host_mock.model = TEST_HOST_MODEL + host_mock.camera_model.return_value = TEST_CAM_MODEL + host_mock.camera_name.return_value = TEST_NVR_NAME + host_mock.camera_sw_version.return_value = "v1.1.0.0.0.0000" 
host_mock.session_active = True host_mock.timeout = 60 host_mock.renewtimer.return_value = 600 + host_mock.wifi_connection = False + host_mock.wifi_signal = None + host_mock.whiteled_mode_list.return_value = [] + host_mock.zoom_range.return_value = { + "zoom": {"pos": {"min": 0, "max": 100}}, + "focus": {"pos": {"min": 0, "max": 100}}, + } + host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]} + host_mock.checked_api_versions = {"GetEvents": 1} + host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]} yield host_mock_class diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..9f70673695c --- /dev/null +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'HTTP(S) port': 1234, + 'HTTPS': True, + 'IPC cams': dict({ + '0': dict({ + 'firmware version': 'v1.1.0.0.0.0000', + 'model': 'RLC-123', + }), + }), + 'ONVIF enabled': True, + 'RTMP enabled': True, + 'RTSP enabled': True, + 'WiFi connection': False, + 'WiFi signal': None, + 'abilities': dict({ + 'abilityChn': list([ + dict({ + 'aiTrack': dict({ + 'permit': 0, + 'ver': 0, + }), + }), + ]), + }), + 'api versions': dict({ + 'GetEvents': 1, + }), + 'capabilities': dict({ + '0': list([ + 'motion_detection', + ]), + 'Host': list([ + 'RTSP', + ]), + }), + 'channels': list([ + 0, + ]), + 'event connection': 'Fast polling', + 'firmware version': 'v1.0.0.0.0.0000', + 'hardware version': 'IPC_00000', + 'model': 'RLN8-410', + 'stream channels': list([ + 0, + ]), + 'stream protocol': 'rtsp', + }) +# --- diff --git a/tests/components/reolink/test_diagnostics.py b/tests/components/reolink/test_diagnostics.py new file mode 100644 index 00000000000..57b474c13ad --- /dev/null +++ b/tests/components/reolink/test_diagnostics.py @@ -0,0 +1,25 @@ +"""Test Reolink diagnostics.""" + +from 
unittest.mock import MagicMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test Reolink diagnostics.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert diag == snapshot diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index e2bd622bb43..6a9a8b957db 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -11,11 +11,15 @@ from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow -from .conftest import TEST_NVR_NAME +from .conftest import TEST_CAM_MODEL, TEST_HOST_MODEL, TEST_NVR_NAME from tests.common import MockConfigEntry, async_fire_time_changed @@ -102,6 +106,7 @@ async def test_entry_reloading( reolink_connect: MagicMock, ) -> None: """Test the entry is reloaded correctly when settings change.""" + reolink_connect.is_nvr = False assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -115,6 +120,58 @@ async def 
test_entry_reloading( assert config_entry.title == "New Name" +@pytest.mark.parametrize( + ("attr", "value", "expected_models"), + [ + ( + None, + None, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ("channels", [], [TEST_HOST_MODEL]), + ( + "camera_model", + Mock(return_value="RLC-567"), + [TEST_HOST_MODEL, "RLC-567"], + ), + ], +) +async def test_cleanup_disconnected_cams( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + attr: str | None, + value: Any, + expected_models: list[str], +) -> None: + """Test device and entity registry are cleaned up when camera is disconnected from NVR.""" + reolink_connect.channels = [0] + # setup CH 0 and NVR switch entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted([TEST_HOST_MODEL, TEST_CAM_MODEL]) + + # reload integration after 'disconnecting' a camera. 
+ if attr is not None: + setattr(reolink_connect, attr, value) + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_reload(config_entry.entry_id) + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted(expected_models) + + async def test_no_repair_issue( hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py new file mode 100644 index 00000000000..7fe3570564a --- /dev/null +++ b/tests/components/reolink/test_media_source.py @@ -0,0 +1,288 @@ +"""Tests for the Reolink media_source platform.""" +from datetime import datetime, timedelta +import logging +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.media_source import ( + DOMAIN as MEDIA_SOURCE_DOMAIN, + URI_SCHEME, + async_browse_media, + async_resolve_media, +) +from homeassistant.components.media_source.error import Unresolvable +from homeassistant.components.reolink import const +from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import format_mac +from homeassistant.setup import async_setup_component + +from .conftest import ( + TEST_HOST2, + TEST_MAC2, + TEST_NVR_NAME, + TEST_NVR_NAME2, + TEST_PASSWORD2, + TEST_PORT, + TEST_USE_HTTPS, + TEST_USERNAME2, +) + +from tests.common 
import MockConfigEntry + +TEST_YEAR = 2023 +TEST_MONTH = 11 +TEST_DAY = 14 +TEST_DAY2 = 15 +TEST_HOUR = 13 +TEST_MINUTE = 12 +TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00" +TEST_STREAM = "main" +TEST_CHANNEL = "0" + +TEST_MIME_TYPE = "application/x-mpegURL" +TEST_URL = "http:test_url" + + +@pytest.fixture(autouse=True) +async def setup_component(hass: HomeAssistant) -> None: + """Set up component.""" + assert await async_setup_component(hass, MEDIA_SOURCE_DOMAIN, {}) + assert await async_setup_component(hass, MEDIA_STREAM_DOMAIN, {}) + + +async def test_resolve( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test resolving Reolink media items.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + reolink_connect.get_vod_source.return_value = (TEST_MIME_TYPE, TEST_URL) + caplog.set_level(logging.DEBUG) + + file_id = ( + f"FILE|{config_entry.entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" + ) + + play_media = await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{file_id}") + + assert play_media.mime_type == TEST_MIME_TYPE + + +async def test_browsing( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test browsing the Reolink three.""" + entry_id = config_entry.entry_id + reolink_connect.api_version.return_value = 1 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(entry_id) is True + await hass.async_block_till_done() + + entries = dr.async_entries_for_config_entry(device_registry, entry_id) + assert len(entries) > 0 + device_registry.async_update_device(entries[0].id, name_by_user="Cam new name") + + caplog.set_level(logging.DEBUG) + + # browse root + browse = await 
async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + + browse_root_id = f"CAM|{entry_id}|{TEST_CHANNEL}" + assert browse.domain == DOMAIN + assert browse.title == "Reolink" + assert browse.identifier is None + assert browse.children[0].identifier == browse_root_id + + # browse resolution select + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") + + browse_resolution_id = f"RESs|{entry_id}|{TEST_CHANNEL}" + browse_res_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|sub" + browse_res_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|main" + assert browse.domain == DOMAIN + assert browse.title == TEST_NVR_NAME + assert browse.identifier == browse_resolution_id + assert browse.children[0].identifier == browse_res_sub_id + assert browse.children[1].identifier == browse_res_main_id + + # browse camera recording days + mock_status = MagicMock() + mock_status.year = TEST_YEAR + mock_status.month = TEST_MONTH + mock_status.days = (TEST_DAY, TEST_DAY2) + reolink_connect.request_vod_files.return_value = ([mock_status], []) + + browse = await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_main_id}" + ) + + browse_days_id = f"DAYS|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}" + browse_day_0_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" + browse_day_1_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" + assert browse.domain == DOMAIN + assert browse.title == f"{TEST_NVR_NAME} High res." 
+ assert browse.identifier == browse_days_id + assert browse.children[0].identifier == browse_day_0_id + assert browse.children[1].identifier == browse_day_1_id + + # browse camera recording files on day + mock_vod_file = MagicMock() + mock_vod_file.start_time = datetime( + TEST_YEAR, TEST_MONTH, TEST_DAY, TEST_HOUR, TEST_MINUTE + ) + mock_vod_file.duration = timedelta(minutes=15) + mock_vod_file.file_name = TEST_FILE_NAME + reolink_connect.request_vod_files.return_value = ([mock_status], [mock_vod_file]) + + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_day_0_id}") + + browse_files_id = f"FILES|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}" + browse_file_id = f"FILE|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" + assert browse.domain == DOMAIN + assert ( + browse.title == f"{TEST_NVR_NAME} High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" + ) + assert browse.identifier == browse_files_id + assert browse.children[0].identifier == browse_file_id + + +async def test_browsing_unsupported_encoding( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test browsing a Reolink camera with unsupported stream encoding.""" + entry_id = config_entry.entry_id + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(entry_id) is True + await hass.async_block_till_done() + + browse_root_id = f"CAM|{entry_id}|{TEST_CHANNEL}" + + # browse resolution select/camera recording days when main encoding unsupported + mock_status = MagicMock() + mock_status.year = TEST_YEAR + mock_status.month = TEST_MONTH + mock_status.days = (TEST_DAY, TEST_DAY2) + reolink_connect.request_vod_files.return_value = ([mock_status], []) + reolink_connect.time.return_value = None + reolink_connect.get_encoding.return_value = "h265" + + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") + + browse_days_id = 
f"DAYS|{entry_id}|{TEST_CHANNEL}|sub" + browse_day_0_id = ( + f"DAY|{entry_id}|{TEST_CHANNEL}|sub|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" + ) + browse_day_1_id = ( + f"DAY|{entry_id}|{TEST_CHANNEL}|sub|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" + ) + assert browse.domain == DOMAIN + assert browse.title == f"{TEST_NVR_NAME} Low res." + assert browse.identifier == browse_days_id + assert browse.children[0].identifier == browse_day_0_id + assert browse.children[1].identifier == browse_day_1_id + + +async def test_browsing_rec_playback_unsupported( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test browsing a Reolink camera which does not support playback of recordings.""" + reolink_connect.api_version.return_value = 0 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + + # browse root + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + + assert browse.domain == DOMAIN + assert browse.title == "Reolink" + assert browse.identifier is None + assert browse.children == [] + + +async def test_browsing_errors( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test browsing a Reolink camera errors.""" + reolink_connect.api_version.return_value = 1 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + + # browse root + with pytest.raises(Unresolvable): + await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/UNKNOWN") + with pytest.raises(Unresolvable): + await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/UNKNOWN") + + +async def test_browsing_not_loaded( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test browsing a 
Reolink camera integration which is not loaded.""" + reolink_connect.api_version.return_value = 1 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + + reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) + config_entry2 = MockConfigEntry( + domain=const.DOMAIN, + unique_id=format_mac(TEST_MAC2), + data={ + CONF_HOST: TEST_HOST2, + CONF_USERNAME: TEST_USERNAME2, + CONF_PASSWORD: TEST_PASSWORD2, + CONF_PORT: TEST_PORT, + const.CONF_USE_HTTPS: TEST_USE_HTTPS, + }, + options={ + const.CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME2, + ) + config_entry2.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry2.entry_id) is False + await hass.async_block_till_done() + + # browse root + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + + assert browse.domain == DOMAIN + assert browse.title == "Reolink" + assert browse.identifier is None + assert len(browse.children) == 1 diff --git a/tests/components/rest/test_switch.py b/tests/components/rest/test_switch.py index d57cd41aa10..df90af44e73 100644 --- a/tests/components/rest/test_switch.py +++ b/tests/components/rest/test_switch.py @@ -61,7 +61,10 @@ async def test_setup_missing_config( assert await async_setup_component(hass, SWITCH_DOMAIN, config) await hass.async_block_till_done() assert_setup_component(0, SWITCH_DOMAIN) - assert "Invalid config for [switch.rest]: required key not provided" in caplog.text + assert ( + "Invalid config for 'switch.rest': required key 'resource' not provided" + in caplog.text + ) async def test_setup_missing_schema( @@ -72,7 +75,7 @@ async def test_setup_missing_schema( assert await async_setup_component(hass, SWITCH_DOMAIN, config) await hass.async_block_till_done() assert_setup_component(0, SWITCH_DOMAIN) - assert "Invalid config for [switch.rest]: invalid url" in 
caplog.text + assert "Invalid config for 'switch.rest': invalid url" in caplog.text @respx.mock diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 2b6edf86132..e9800393835 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -1,13 +1,74 @@ """Configuration for Ring tests.""" +from collections.abc import Generator import re +from unittest.mock import AsyncMock, Mock, patch import pytest import requests_mock -from tests.common import load_fixture +from homeassistant.components.ring import DOMAIN +from homeassistant.const import CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture from tests.components.light.conftest import mock_light_profiles # noqa: F401 +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ring.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_ring_auth(): + """Mock ring_doorbell.Auth.""" + with patch("ring_doorbell.Auth", autospec=True) as mock_ring_auth: + mock_ring_auth.return_value.fetch_token.return_value = { + "access_token": "mock-token" + } + yield mock_ring_auth.return_value + + +@pytest.fixture +def mock_ring(): + """Mock ring_doorbell.Ring.""" + with patch("ring_doorbell.Ring", autospec=True) as mock_ring: + yield mock_ring.return_value + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock ConfigEntry.""" + return MockConfigEntry( + title="Ring", + domain=DOMAIN, + data={ + CONF_USERNAME: "foo@bar.com", + "token": {"access_token": "mock-token"}, + }, + unique_id="foo@bar.com", + ) + + +@pytest.fixture +async def mock_added_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_ring_auth: Mock, + mock_ring: Mock, +) -> MockConfigEntry: + """Mock ConfigEntry that's been added to HA.""" 
+ mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert DOMAIN in hass.config_entries.async_domains() + return mock_config_entry + + @pytest.fixture(name="requests_mock") def requests_mock_fixture(): """Fixture to provide a requests mocker.""" @@ -52,5 +113,11 @@ def requests_mock_fixture(): re.compile(r"https:\/\/api\.ring\.com\/clients_api\/chimes\/\d+\/health"), text=load_fixture("chime_health_attrs.json", "ring"), ) - + mock.get( + re.compile( + r"https:\/\/api\.ring\.com\/clients_api\/dings\/\d+\/share/play" + ), + status_code=200, + json={"url": "http://127.0.0.1/foo"}, + ) yield mock diff --git a/tests/components/ring/snapshots/test_diagnostics.ambr b/tests/components/ring/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..64e753ba2b3 --- /dev/null +++ b/tests/components/ring/snapshots/test_diagnostics.ambr @@ -0,0 +1,579 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'device_data': list([ + dict({ + 'address': '**REDACTED**', + 'alerts': dict({ + 'connection': 'online', + }), + 'description': '**REDACTED**', + 'device_id': '**REDACTED**', + 'do_not_disturb': dict({ + 'seconds_left': 0, + }), + 'features': dict({ + 'ringtones_enabled': True, + }), + 'firmware_version': '1.2.3', + 'id': '**REDACTED**', + 'kind': 'chime', + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'owned': True, + 'owner': dict({ + 'email': '**REDACTED**', + 'first_name': '**REDACTED**', + 'id': '**REDACTED**', + 'last_name': '**REDACTED**', + }), + 'settings': dict({ + 'ding_audio_id': None, + 'ding_audio_user_id': None, + 'motion_audio_id': None, + 'motion_audio_user_id': None, + 'volume': 2, + }), + 'time_zone': 'America/New_York', + }), + dict({ + 'address': '**REDACTED**', + 'alerts': dict({ + 'connection': 'online', + }), + 'battery_life': 4081, + 'description': '**REDACTED**', + 'device_id': '**REDACTED**', + 
'external_connection': False, + 'features': dict({ + 'advanced_motion_enabled': False, + 'motion_message_enabled': False, + 'motions_enabled': True, + 'people_only_enabled': False, + 'shadow_correction_enabled': False, + 'show_recordings': True, + }), + 'firmware_version': '1.4.26', + 'id': '**REDACTED**', + 'kind': 'lpd_v1', + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'motion_snooze': None, + 'owned': True, + 'owner': dict({ + 'email': '**REDACTED**', + 'first_name': '**REDACTED**', + 'id': '**REDACTED**', + 'last_name': '**REDACTED**', + }), + 'settings': dict({ + 'chime_settings': dict({ + 'duration': 3, + 'enable': True, + 'type': 0, + }), + 'doorbell_volume': 1, + 'enable_vod': True, + 'live_view_preset_profile': 'highest', + 'live_view_presets': list([ + 'low', + 'middle', + 'high', + 'highest', + ]), + 'motion_announcement': False, + 'motion_snooze_preset_profile': 'low', + 'motion_snooze_presets': list([ + 'null', + 'low', + 'medium', + 'high', + ]), + }), + 'subscribed': True, + 'subscribed_motions': True, + 'time_zone': 'America/New_York', + }), + dict({ + 'address': '**REDACTED**', + 'alerts': dict({ + 'connection': 'online', + }), + 'battery_life': 80, + 'description': '**REDACTED**', + 'device_id': '**REDACTED**', + 'external_connection': False, + 'features': dict({ + 'advanced_motion_enabled': False, + 'motion_message_enabled': False, + 'motions_enabled': True, + 'night_vision_enabled': False, + 'people_only_enabled': False, + 'shadow_correction_enabled': False, + 'show_recordings': True, + }), + 'firmware_version': '1.9.3', + 'id': '**REDACTED**', + 'kind': 'hp_cam_v1', + 'latitude': '**REDACTED**', + 'led_status': 'off', + 'location_id': None, + 'longitude': '**REDACTED**', + 'motion_snooze': dict({ + 'scheduled': True, + }), + 'night_mode_status': 'false', + 'owned': True, + 'owner': dict({ + 'email': '**REDACTED**', + 'first_name': '**REDACTED**', + 'id': '**REDACTED**', + 'last_name': '**REDACTED**', + }), + 
'ring_cam_light_installed': 'false', + 'ring_id': None, + 'settings': dict({ + 'chime_settings': dict({ + 'duration': 10, + 'enable': True, + 'type': 0, + }), + 'doorbell_volume': 11, + 'enable_vod': True, + 'floodlight_settings': dict({ + 'duration': 30, + 'priority': 0, + }), + 'light_schedule_settings': dict({ + 'end_hour': 0, + 'end_minute': 0, + 'start_hour': 0, + 'start_minute': 0, + }), + 'live_view_preset_profile': 'highest', + 'live_view_presets': list([ + 'low', + 'middle', + 'high', + 'highest', + ]), + 'motion_announcement': False, + 'motion_snooze_preset_profile': 'low', + 'motion_snooze_presets': list([ + 'none', + 'low', + 'medium', + 'high', + ]), + 'motion_zones': dict({ + 'active_motion_filter': 1, + 'advanced_object_settings': dict({ + 'human_detection_confidence': dict({ + 'day': 0.7, + 'night': 0.7, + }), + 'motion_zone_overlap': dict({ + 'day': 0.1, + 'night': 0.2, + }), + 'object_size_maximum': dict({ + 'day': 0.8, + 'night': 0.8, + }), + 'object_size_minimum': dict({ + 'day': 0.03, + 'night': 0.05, + }), + 'object_time_overlap': dict({ + 'day': 0.1, + 'night': 0.6, + }), + }), + 'enable_audio': False, + 'pir_settings': dict({ + 'sensitivity1': 1, + 'sensitivity2': 1, + 'sensitivity3': 1, + 'zone_mask': 6, + }), + 'sensitivity': 5, + 'zone1': dict({ + 'name': 'Zone 1', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex5': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + 'zone2': dict({ + 'name': 'Zone 2', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex5': 
dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + 'zone3': dict({ + 'name': 'Zone 3', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex5': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + }), + 'pir_motion_zones': list([ + 0, + 1, + 1, + ]), + 'pir_settings': dict({ + 'sensitivity1': 1, + 'sensitivity2': 1, + 'sensitivity3': 1, + 'zone_mask': 6, + }), + 'stream_setting': 0, + 'video_settings': dict({ + 'ae_level': 0, + 'birton': None, + 'brightness': 0, + 'contrast': 64, + 'saturation': 80, + }), + }), + 'siren_status': dict({ + 'seconds_remaining': 0, + }), + 'stolen': False, + 'subscribed': True, + 'subscribed_motions': True, + 'time_zone': 'America/New_York', + }), + dict({ + 'address': '**REDACTED**', + 'alerts': dict({ + 'connection': 'online', + }), + 'battery_life': 80, + 'description': '**REDACTED**', + 'device_id': '**REDACTED**', + 'external_connection': False, + 'features': dict({ + 'advanced_motion_enabled': False, + 'motion_message_enabled': False, + 'motions_enabled': True, + 'night_vision_enabled': False, + 'people_only_enabled': False, + 'shadow_correction_enabled': False, + 'show_recordings': True, + }), + 'firmware_version': '1.9.3', + 'id': '**REDACTED**', + 'kind': 'hp_cam_v1', + 'latitude': '**REDACTED**', + 'led_status': 'on', + 'location_id': None, + 'longitude': '**REDACTED**', + 'motion_snooze': dict({ + 'scheduled': True, + }), + 'night_mode_status': 'false', + 'owned': True, + 'owner': dict({ + 'email': '**REDACTED**', + 'first_name': '**REDACTED**', + 'id': '**REDACTED**', + 
'last_name': '**REDACTED**', + }), + 'ring_cam_light_installed': 'false', + 'ring_id': None, + 'settings': dict({ + 'chime_settings': dict({ + 'duration': 10, + 'enable': True, + 'type': 0, + }), + 'doorbell_volume': 11, + 'enable_vod': True, + 'floodlight_settings': dict({ + 'duration': 30, + 'priority': 0, + }), + 'light_schedule_settings': dict({ + 'end_hour': 0, + 'end_minute': 0, + 'start_hour': 0, + 'start_minute': 0, + }), + 'live_view_preset_profile': 'highest', + 'live_view_presets': list([ + 'low', + 'middle', + 'high', + 'highest', + ]), + 'motion_announcement': False, + 'motion_snooze_preset_profile': 'low', + 'motion_snooze_presets': list([ + 'none', + 'low', + 'medium', + 'high', + ]), + 'motion_zones': dict({ + 'active_motion_filter': 1, + 'advanced_object_settings': dict({ + 'human_detection_confidence': dict({ + 'day': 0.7, + 'night': 0.7, + }), + 'motion_zone_overlap': dict({ + 'day': 0.1, + 'night': 0.2, + }), + 'object_size_maximum': dict({ + 'day': 0.8, + 'night': 0.8, + }), + 'object_size_minimum': dict({ + 'day': 0.03, + 'night': 0.05, + }), + 'object_time_overlap': dict({ + 'day': 0.1, + 'night': 0.6, + }), + }), + 'enable_audio': False, + 'pir_settings': dict({ + 'sensitivity1': 1, + 'sensitivity2': 1, + 'sensitivity3': 1, + 'zone_mask': 6, + }), + 'sensitivity': 5, + 'zone1': dict({ + 'name': 'Zone 1', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex5': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + 'zone2': dict({ + 'name': 'Zone 2', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 
0.0, + 'y': 0.0, + }), + 'vertex5': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + 'zone3': dict({ + 'name': 'Zone 3', + 'state': 2, + 'vertex1': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex2': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex3': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex4': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex5': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex6': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex7': dict({ + 'x': 0.0, + 'y': 0.0, + }), + 'vertex8': dict({ + 'x': 0.0, + 'y': 0.0, + }), + }), + }), + 'pir_motion_zones': list([ + 0, + 1, + 1, + ]), + 'pir_settings': dict({ + 'sensitivity1': 1, + 'sensitivity2': 1, + 'sensitivity3': 1, + 'zone_mask': 6, + }), + 'stream_setting': 0, + 'video_settings': dict({ + 'ae_level': 0, + 'birton': None, + 'brightness': 0, + 'contrast': 64, + 'saturation': 80, + }), + }), + 'siren_status': dict({ + 'seconds_remaining': 30, + }), + 'stolen': False, + 'subscribed': True, + 'subscribed_motions': True, + 'time_zone': 'America/New_York', + }), + ]), + }) +# --- diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index 3e0c354e8fa..53c7e139a51 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -1,13 +1,23 @@ """Test the Ring config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock + +import pytest +import ring_doorbell from homeassistant import config_entries from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.config_flow import InvalidAuth +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry -async def test_form(hass: 
HomeAssistant) -> None: +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -16,20 +26,11 @@ async def test_form(hass: HomeAssistant) -> None: assert result["type"] == "form" assert result["errors"] == {} - with patch( - "homeassistant.components.ring.config_flow.Auth", - return_value=Mock( - fetch_token=Mock(return_value={"access_token": "mock-token"}) - ), - ), patch( - "homeassistant.components.ring.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "hello@home-assistant.io", "password": "test-password"}, - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "hello@home-assistant.io", "password": "test-password"}, + ) + await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "hello@home-assistant.io" @@ -40,20 +41,181 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("error_type", "errors_msg"), + [ + (ring_doorbell.AuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], + ids=["invalid-auth", "unknown-error"], +) +async def test_form_error( + hass: HomeAssistant, mock_ring_auth: Mock, error_type, errors_msg +) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - - with patch( - "homeassistant.components.ring.config_flow.Auth.fetch_token", - side_effect=InvalidAuth, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "hello@home-assistant.io", "password": "test-password"}, - ) + 
mock_ring_auth.fetch_token.side_effect = error_type + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "hello@home-assistant.io", "password": "test-password"}, + ) assert result2["type"] == "form" - assert result2["errors"] == {"base": "invalid_auth"} + assert result2["errors"] == {"base": errors_msg} + + +async def test_form_2fa( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, +) -> None: + """Test form flow for 2fa.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + + mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "foo@bar.com", + CONF_PASSWORD: "fake-password", + }, + ) + await hass.async_block_till_done() + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "fake-password", None + ) + + assert result2["type"] == FlowResultType.FORM + assert result2["step_id"] == "2fa" + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"2fa": "123456"}, + ) + + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "fake-password", "123456" + ) + assert result3["type"] == FlowResultType.CREATE_ENTRY + assert result3["title"] == "foo@bar.com" + assert result3["data"] == { + "username": "foo@bar.com", + "token": "new-foobar", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_reauth( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + 
+ flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "other_fake_password", + }, + ) + + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "other_fake_password", None + ) + assert result2["type"] == FlowResultType.FORM + assert result2["step_id"] == "2fa" + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={"2fa": "123456"}, + ) + + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "other_fake_password", "123456" + ) + assert result3["type"] == FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" + assert mock_added_config_entry.data == { + "username": "foo@bar.com", + "token": "new-foobar", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("error_type", "errors_msg"), + [ + (ring_doorbell.AuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], + ids=["invalid-auth", "unknown-error"], +) +async def test_reauth_error( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_ring_auth: Mock, + error_type, + errors_msg, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + mock_ring_auth.fetch_token.side_effect = error_type + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "error_fake_password", + }, + ) + await 
hass.async_block_till_done() + + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "error_fake_password", None + ) + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": errors_msg} + + # Now test reauth can go on to succeed + mock_ring_auth.fetch_token.reset_mock(side_effect=True) + mock_ring_auth.fetch_token.return_value = "new-foobar" + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_PASSWORD: "other_fake_password", + }, + ) + + mock_ring_auth.fetch_token.assert_called_once_with( + "foo@bar.com", "other_fake_password", None + ) + assert result3["type"] == FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" + assert mock_added_config_entry.data == { + "username": "foo@bar.com", + "token": "new-foobar", + } + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/ring/test_diagnostics.py b/tests/components/ring/test_diagnostics.py new file mode 100644 index 00000000000..269446c3ad5 --- /dev/null +++ b/tests/components/ring/test_diagnostics.py @@ -0,0 +1,24 @@ +"""Test Ring diagnostics.""" + +import requests_mock +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + requests_mock: requests_mock.Mocker, + snapshot: SnapshotAssertion, +) -> None: + """Test Ring diagnostics.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + diag = await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + assert diag == snapshot diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 
7e3f5344f73..6ad79623a12 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -1,12 +1,20 @@ """The tests for the Ring component.""" +from datetime import timedelta +from unittest.mock import patch + +import pytest import requests_mock +from ring_doorbell import AuthenticationError, RingError, RingTimeout import homeassistant.components.ring as ring +from homeassistant.components.ring import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util -from tests.common import load_fixture +from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture async def test_setup(hass: HomeAssistant, requests_mock: requests_mock.Mocker) -> None: @@ -32,3 +40,152 @@ async def test_setup(hass: HomeAssistant, requests_mock: requests_mock.Mocker) - "https://api.ring.com/clients_api/doorbots/987652/health", text=load_fixture("doorboot_health_attrs.json", "ring"), ) + + +async def test_auth_failed_on_setup( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + mock_config_entry: MockConfigEntry, +) -> None: + """Test auth failure on setup entry.""" + mock_config_entry.add_to_hass(hass) + with patch( + "ring_doorbell.Ring.update_data", + side_effect=AuthenticationError, + ): + assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_auth_failure_on_global_update( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + mock_config_entry: MockConfigEntry, + caplog, +) -> None: + """Test authentication failure on global data update.""" + 
mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + with patch( + "ring_doorbell.Ring.update_devices", + side_effect=AuthenticationError, + ): + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + + assert "Ring access token is no longer valid, need to re-authenticate" in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] + + assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + + +async def test_auth_failure_on_device_update( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + mock_config_entry: MockConfigEntry, + caplog, +) -> None: + """Test authentication failure on global data update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + with patch( + "ring_doorbell.RingDoorBell.history", + side_effect=AuthenticationError, + ): + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + + assert "Ring access token is no longer valid, need to re-authenticate" in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] + + assert any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + + +@pytest.mark.parametrize( + ("error_type", "log_msg"), + [ + ( + RingTimeout, + "Time out fetching Ring device data", + ), + ( + RingError, + "Error fetching Ring device data: ", + ), + ], + ids=["timeout-error", "other-error"], +) +async def test_error_on_global_update( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + mock_config_entry: MockConfigEntry, + caplog, + error_type, + log_msg, +) -> None: + 
"""Test error on global data update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + with patch( + "ring_doorbell.Ring.update_devices", + side_effect=error_type, + ): + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + + assert log_msg in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] + + assert mock_config_entry.entry_id in hass.data[DOMAIN] + + +@pytest.mark.parametrize( + ("error_type", "log_msg"), + [ + ( + RingTimeout, + "Time out fetching Ring history data for device aacdef123", + ), + ( + RingError, + "Error fetching Ring history data for device aacdef123: ", + ), + ], + ids=["timeout-error", "other-error"], +) +async def test_error_on_device_update( + hass: HomeAssistant, + requests_mock: requests_mock.Mocker, + mock_config_entry: MockConfigEntry, + caplog, + error_type, + log_msg, +) -> None: + """Test auth failure on data update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + with patch( + "ring_doorbell.RingDoorBell.history", + side_effect=error_type, + ): + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20)) + await hass.async_block_till_done() + + assert log_msg in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] + assert mock_config_entry.entry_id in hass.data[DOMAIN] diff --git a/tests/components/risco/conftest.py b/tests/components/risco/conftest.py index 325e787bb4f..a8a764cd502 100644 --- a/tests/components/risco/conftest.py +++ b/tests/components/risco/conftest.py @@ -140,7 +140,7 @@ async def setup_risco_cloud(hass, cloud_config_entry, events): "homeassistant.components.risco.RiscoCloud.site_name", new_callable=PropertyMock(return_value=TEST_SITE_NAME), ), patch( - 
"homeassistant.components.risco.RiscoCloud.close" + "homeassistant.components.risco.RiscoCloud.close", ), patch( "homeassistant.components.risco.RiscoCloud.get_events", return_value=events, @@ -191,7 +191,7 @@ async def setup_risco_local(hass, local_config_entry): "homeassistant.components.risco.RiscoLocal.id", new_callable=PropertyMock(return_value=TEST_SITE_UUID), ), patch( - "homeassistant.components.risco.RiscoLocal.disconnect" + "homeassistant.components.risco.RiscoLocal.disconnect", ): await hass.config_entries.async_setup(local_config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index fdb51c65dda..8207ad819b7 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -162,7 +162,7 @@ async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: "homeassistant.components.risco.config_flow.RiscoCloud.site_name", new_callable=PropertyMock(return_value=TEST_SITE_NAME), ), patch( - "homeassistant.components.risco.config_flow.RiscoCloud.close" + "homeassistant.components.risco.config_flow.RiscoCloud.close", ), patch( "homeassistant.components.risco.async_setup_entry", return_value=True, @@ -198,7 +198,7 @@ async def test_form_reauth_with_new_username( "homeassistant.components.risco.config_flow.RiscoCloud.site_name", new_callable=PropertyMock(return_value=TEST_SITE_NAME), ), patch( - "homeassistant.components.risco.config_flow.RiscoCloud.close" + "homeassistant.components.risco.config_flow.RiscoCloud.close", ), patch( "homeassistant.components.risco.async_setup_entry", return_value=True, @@ -307,7 +307,7 @@ async def test_form_local_already_exists(hass: HomeAssistant) -> None: "homeassistant.components.risco.config_flow.RiscoLocal.id", new_callable=PropertyMock(return_value=TEST_SITE_NAME), ), patch( - "homeassistant.components.risco.config_flow.RiscoLocal.disconnect" + 
"homeassistant.components.risco.config_flow.RiscoLocal.disconnect", ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], TEST_LOCAL_DATA diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index 3435bd58cb3..711ae203e0f 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -12,7 +12,16 @@ from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .mock_data import BASE_URL, HOME_DATA, NETWORK_INFO, PROP, USER_DATA, USER_EMAIL +from .mock_data import ( + BASE_URL, + HOME_DATA, + MAP_DATA, + MULTI_MAP_LIST, + NETWORK_INFO, + PROP, + USER_DATA, + USER_EMAIL, +) from tests.common import MockConfigEntry @@ -33,6 +42,12 @@ def bypass_api_fixture() -> None: ), patch( "homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", return_value=PROP, + ), patch( + "homeassistant.components.roborock.coordinator.RoborockMqttClient.get_multi_maps_list", + return_value=MULTI_MAP_LIST, + ), patch( + "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + return_value=MAP_DATA, ), patch( "homeassistant.components.roborock.coordinator.RoborockLocalClient.send_message" ), patch( @@ -40,9 +55,12 @@ def bypass_api_fixture() -> None: ), patch( "homeassistant.components.roborock.coordinator.RoborockLocalClient._wait_response" ), patch( - "roborock.api.AttributeCache.async_value" + "roborock.api.AttributeCache.async_value", ), patch( - "roborock.api.AttributeCache.value" + "roborock.api.AttributeCache.value", + ), patch( + "homeassistant.components.roborock.image.MAP_SLEEP", + 0, ): yield diff --git a/tests/components/roborock/mock_data.py b/tests/components/roborock/mock_data.py index 87ed02bc3ec..8935a77f142 100644 --- a/tests/components/roborock/mock_data.py +++ b/tests/components/roborock/mock_data.py @@ -1,17 +1,22 @@ """Mock data for Roborock 
tests.""" from __future__ import annotations +from PIL import Image from roborock.containers import ( CleanRecord, CleanSummary, Consumable, DnDTimer, HomeData, + MultiMapsList, NetworkInfo, S7Status, UserData, ) from roborock.roborock_typing import DeviceProp +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.map_data import ImageData +from vacuum_map_parser_roborock.map_data_parser import MapData from homeassistant.components.roborock import CONF_BASE_URL, CONF_USER_DATA from homeassistant.const import CONF_USERNAME @@ -418,3 +423,32 @@ PROP = DeviceProp( NETWORK_INFO = NetworkInfo( ip="123.232.12.1", ssid="wifi", mac="ac:cc:cc:cc:cc", bssid="bssid", rssi=90 ) + +MULTI_MAP_LIST = MultiMapsList.from_dict( + { + "maxMultiMap": 4, + "maxBakMap": 1, + "multiMapCount": 2, + "mapInfo": [ + { + "mapFlag": 0, + "addTime": 1686235489, + "length": 8, + "name": "Upstairs", + "bakMaps": [{"addTime": 1673304288}], + }, + { + "mapFlag": 1, + "addTime": 1697579901, + "length": 10, + "name": "Downstairs", + "bakMaps": [{"addTime": 1695521431}], + }, + ], + } +) + +MAP_DATA = MapData(0, 0) +MAP_DATA.image = ImageData( + 100, 10, 10, 10, 10, ImageConfig(), Image.new("RGB", (1, 1)), lambda p: p +) diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index bbaa8935461..e2454b3ad57 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -1,4 +1,5 @@ """Test Roborock config flow.""" +from copy import deepcopy from unittest.mock import patch import pytest @@ -12,9 +13,11 @@ from roborock.exceptions import ( from homeassistant import config_entries from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN +from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from ...common import MockConfigEntry from .mock_data import 
MOCK_CONFIG, USER_DATA, USER_EMAIL @@ -35,7 +38,7 @@ async def test_config_flow_success( "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"username": USER_EMAIL} + result["flow_id"], {CONF_USERNAME: USER_EMAIL} ) assert result["type"] == FlowResultType.FORM @@ -89,7 +92,7 @@ async def test_config_flow_failures_request_code( side_effect=request_code_side_effect, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"username": USER_EMAIL} + result["flow_id"], {CONF_USERNAME: USER_EMAIL} ) assert result["type"] == FlowResultType.FORM assert result["errors"] == request_code_errors @@ -98,7 +101,7 @@ async def test_config_flow_failures_request_code( "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"username": USER_EMAIL} + result["flow_id"], {CONF_USERNAME: USER_EMAIL} ) assert result["type"] == FlowResultType.FORM @@ -149,7 +152,7 @@ async def test_config_flow_failures_code_login( "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"username": USER_EMAIL} + result["flow_id"], {CONF_USERNAME: USER_EMAIL} ) assert result["type"] == FlowResultType.FORM @@ -178,3 +181,39 @@ async def test_config_flow_failures_code_login( assert result["data"] == MOCK_CONFIG assert result["result"] assert len(mock_setup.mock_calls) == 1 + + +async def test_reauth_flow( + hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry +) -> None: + """Test reauth flow.""" + # Start reauth + result = mock_roborock_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + 
+ # Request a new code + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + # Enter a new code + assert result["step_id"] == "code" + assert result["type"] == FlowResultType.FORM + new_user_data = deepcopy(USER_DATA) + new_user_data.rriot.s = "new_password_hash" + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.code_login", + return_value=new_user_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_ENTRY_CODE: "123456"} + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_roborock_entry.data["user_data"]["rriot"]["s"] == "new_password_hash" diff --git a/tests/components/roborock/test_image.py b/tests/components/roborock/test_image.py new file mode 100644 index 00000000000..80d4bd37337 --- /dev/null +++ b/tests/components/roborock/test_image.py @@ -0,0 +1,75 @@ +"""Test Roborock Image platform.""" +import copy +from datetime import timedelta +from http import HTTPStatus +from unittest.mock import patch + +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.components.roborock.mock_data import MAP_DATA, PROP +from tests.typing import ClientSessionGenerator + + +async def test_floorplan_image( + hass: HomeAssistant, + setup_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test floor plan map image is correctly set up.""" + # Setup calls the image parsing the first time and caches it. 
+ assert len(hass.states.async_all("image")) == 4 + + assert hass.states.get("image.roborock_s7_maxv_upstairs") is not None + # call a second time -should return cached data + client = await hass_client() + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + body = await resp.read() + assert body is not None + # Call a third time - this time forcing it to update + now = dt_util.utcnow() + timedelta(seconds=91) + async_fire_time_changed(hass, now) + # Copy the device prop so we don't override it + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = 1 + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", + return_value=prop, + ), patch( + "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + ): + await hass.async_block_till_done() + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + body = await resp.read() + assert body is not None + + +async def test_floorplan_image_failed_parse( + hass: HomeAssistant, + setup_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test that we correctly handle getting None from the image parser.""" + client = await hass_client() + map_data = copy.deepcopy(MAP_DATA) + map_data.image = None + now = dt_util.utcnow() + timedelta(seconds=91) + async_fire_time_changed(hass, now) + # Copy the device prop so we don't override it + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = 1 + # Update image, but get none for parse image. 
+ with patch( + "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + return_value=map_data, + ), patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", + return_value=prop, + ), patch( + "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + ): + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert not resp.ok diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py index a5ad24b431c..5d1afaf8f84 100644 --- a/tests/components/roborock/test_init.py +++ b/tests/components/roborock/test_init.py @@ -1,10 +1,11 @@ """Test for Roborock init.""" from unittest.mock import patch +from roborock import RoborockException, RoborockInvalidCredentials + from homeassistant.components.roborock.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -33,8 +34,89 @@ async def test_config_entry_not_ready( with patch( "homeassistant.components.roborock.RoborockApiClient.get_home_data", ), patch( - "homeassistant.components.roborock.RoborockDataUpdateCoordinator._async_update_data", - side_effect=UpdateFailed(), + "homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", + side_effect=RoborockException(), ): await async_setup_component(hass, DOMAIN, {}) assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_config_entry_not_ready_home_data( + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry +) -> None: + """Test that when we fail to get home data, entry retries.""" + with patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data", + side_effect=RoborockException(), + ), patch( + 
"homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", + side_effect=RoborockException(), + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_get_networking_fails( + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture +) -> None: + """Test that when networking fails, we attempt to retry.""" + with patch( + "homeassistant.components.roborock.RoborockMqttClient.get_networking", + side_effect=RoborockException(), + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_get_networking_fails_none( + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture +) -> None: + """Test that when networking returns None, we attempt to retry.""" + with patch( + "homeassistant.components.roborock.RoborockMqttClient.get_networking", + return_value=None, + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_cloud_client_fails_props( + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture +) -> None: + """Test that if networking succeeds, but we can't communicate with the vacuum, we can't get props, fail.""" + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClient.ping", + side_effect=RoborockException(), + ), patch( + "homeassistant.components.roborock.coordinator.RoborockMqttClient.get_prop", + side_effect=RoborockException(), + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_local_client_fails_props( + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry, bypass_api_fixture +) -> None: + """Test that if networking succeeds, but we can't communicate locally with the vacuum, we can't get props, fail.""" + with patch( + 
"homeassistant.components.roborock.coordinator.RoborockLocalClient.get_prop", + side_effect=RoborockException(), + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_reauth_started( + hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry +) -> None: + """Test reauth flow started.""" + with patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data", + side_effect=RoborockInvalidCredentials(), + ): + await async_setup_component(hass, DOMAIN, {}) + assert mock_roborock_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index 35fcc9478cd..4966c8fa3be 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -1,14 +1,20 @@ """Test Roborock Sensors.""" +from unittest.mock import patch +from roborock import DeviceData, HomeDataDevice +from roborock.cloud_api import RoborockMqttClient from roborock.const import ( FILTER_REPLACE_TIME, MAIN_BRUSH_REPLACE_TIME, SENSOR_DIRTY_REPLACE_TIME, SIDE_BRUSH_REPLACE_TIME, ) +from roborock.roborock_message import RoborockMessage, RoborockMessageProtocol from homeassistant.core import HomeAssistant +from .mock_data import CONSUMABLE, STATUS, USER_DATA + from tests.common import MockConfigEntry @@ -47,3 +53,41 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non hass.states.get("sensor.roborock_s7_maxv_last_clean_end").state == "2023-01-01T03:43:58+00:00" ) + + +async def test_listener_update( + hass: HomeAssistant, setup_entry: MockConfigEntry +) -> None: + """Test that when we receive a mqtt topic, we successfully update the entity.""" + assert hass.states.get("sensor.roborock_s7_maxv_status").state == "charging" + # 
Listeners are global based on uuid - so this is okay + client = RoborockMqttClient( + USER_DATA, DeviceData(device=HomeDataDevice("abc123", "", "", "", ""), model="") + ) + # Test Status + with patch("roborock.api.AttributeCache.value", STATUS.as_dict()): + # Symbolizes a mqtt message coming in + client.on_message_received( + [ + RoborockMessage( + protocol=RoborockMessageProtocol.GENERAL_REQUEST, + payload=b'{"t": 1699464794, "dps": {"121": 5}}', + ) + ] + ) + # Test consumable + assert hass.states.get("sensor.roborock_s7_maxv_filter_time_left").state == str( + FILTER_REPLACE_TIME - 74382 + ) + with patch("roborock.api.AttributeCache.value", CONSUMABLE.as_dict()): + client.on_message_received( + [ + RoborockMessage( + protocol=RoborockMessageProtocol.GENERAL_REQUEST, + payload=b'{"t": 1699464794, "dps": {"127": 743}}', + ) + ] + ) + assert hass.states.get("sensor.roborock_s7_maxv_filter_time_left").state == str( + FILTER_REPLACE_TIME - 743 + ) diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index 5e8ab9311aa..874697bf777 100644 --- a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -45,9 +45,9 @@ async def silent_ssdp_scanner(hass): ), patch("homeassistant.components.ssdp.Scanner._async_stop_ssdp_listeners"), patch( "homeassistant.components.ssdp.Scanner.async_scan" ), patch( - "homeassistant.components.ssdp.Server._async_start_upnp_servers" + "homeassistant.components.ssdp.Server._async_start_upnp_servers", ), patch( - "homeassistant.components.ssdp.Server._async_stop_upnp_servers" + "homeassistant.components.ssdp.Server._async_stop_upnp_servers", ): yield diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index 7b610a6b4da..5f9676b7d09 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -54,14 +54,14 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_schlage(): 
+def mock_schlage() -> Mock: """Mock pyschlage.Schlage.""" with patch("pyschlage.Schlage", autospec=True) as mock_schlage: yield mock_schlage.return_value @pytest.fixture -def mock_pyschlage_auth(): +def mock_pyschlage_auth() -> Mock: """Mock pyschlage.Auth.""" with patch("pyschlage.Auth", autospec=True) as mock_auth: mock_auth.return_value.user_id = "abc123" @@ -69,7 +69,7 @@ def mock_pyschlage_auth(): @pytest.fixture -def mock_lock(): +def mock_lock() -> Mock: """Mock Lock fixture.""" mock_lock = create_autospec(Lock) mock_lock.configure_mock( diff --git a/tests/components/schlage/test_config_flow.py b/tests/components/schlage/test_config_flow.py index b256e8950ed..14121f5d9ca 100644 --- a/tests/components/schlage/test_config_flow.py +++ b/tests/components/schlage/test_config_flow.py @@ -9,6 +9,8 @@ from homeassistant.components.schlage.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -78,3 +80,94 @@ async def test_form_unknown(hass: HomeAssistant, mock_pyschlage_auth: Mock) -> N assert result2["type"] == FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + + +async def test_reauth( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_pyschlage_auth: Mock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "new-password"}, + ) + await hass.async_block_till_done() + + mock_pyschlage_auth.authenticate.assert_called_once_with() + assert result2["type"] == FlowResultType.ABORT + assert result2["reason"] == 
"reauth_successful" + assert mock_added_config_entry.data == { + "username": "asdf@asdf.com", + "password": "new-password", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_reauth_invalid_auth( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_pyschlage_auth: Mock, +) -> None: + """Test reauth flow.""" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + mock_pyschlage_auth.authenticate.reset_mock() + mock_pyschlage_auth.authenticate.side_effect = NotAuthorizedError + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "new-password"}, + ) + await hass.async_block_till_done() + + mock_pyschlage_auth.authenticate.assert_called_once_with() + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + + +async def test_reauth_wrong_account( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_pyschlage_auth: Mock, +) -> None: + """Test reauth flow.""" + mock_pyschlage_auth.user_id = "bad-user-id" + mock_added_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"password": "new-password"}, + ) + await hass.async_block_till_done() + + mock_pyschlage_auth.authenticate.assert_called_once_with() + assert result2["type"] == FlowResultType.ABORT + assert result2["reason"] == "wrong_account" + assert mock_added_config_entry.data == { + "username": "asdf@asdf.com", + "password": "hunter2", + } + assert len(mock_setup_entry.mock_calls) == 
1 diff --git a/tests/components/schlage/test_init.py b/tests/components/schlage/test_init.py index 0811d87ec80..0fe7af1982b 100644 --- a/tests/components/schlage/test_init.py +++ b/tests/components/schlage/test_init.py @@ -3,7 +3,7 @@ from unittest.mock import Mock, patch from pycognito.exceptions import WarrantException -from pyschlage.exceptions import Error +from pyschlage.exceptions import Error, NotAuthorizedError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -43,6 +43,41 @@ async def test_update_data_fails( assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY +async def test_update_data_auth_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pyschlage_auth: Mock, + mock_schlage: Mock, +) -> None: + """Test that we properly handle API errors.""" + mock_schlage.locks.side_effect = NotAuthorizedError + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_schlage.locks.call_count == 1 + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_update_data_get_logs_auth_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pyschlage_auth: Mock, + mock_schlage: Mock, + mock_lock: Mock, +) -> None: + """Test that we properly handle API errors.""" + mock_schlage.locks.return_value = [mock_lock] + mock_lock.logs.reset_mock() + mock_lock.logs.side_effect = NotAuthorizedError + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_schlage.locks.call_count == 1 + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + async def test_load_unload_config_entry( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/sensibo/test_button.py b/tests/components/sensibo/test_button.py index 
da6a68af2d1..2277c84d187 100644 --- a/tests/components/sensibo/test_button.py +++ b/tests/components/sensibo/test_button.py @@ -100,7 +100,7 @@ async def test_button_failure( "homeassistant.components.sensibo.util.SensiboClient.async_reset_filter", return_value={"status": "failure"}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( BUTTON_DOMAIN, diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index 530034720f2..9cf0a8972a9 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -742,7 +742,7 @@ async def test_climate_set_timer( "homeassistant.components.sensibo.util.SensiboClient.async_set_timer", return_value={"status": "failure"}, ), pytest.raises( - MultipleInvalid + MultipleInvalid, ): await hass.services.async_call( DOMAIN, @@ -761,7 +761,7 @@ async def test_climate_set_timer( "homeassistant.components.sensibo.util.SensiboClient.async_set_timer", return_value={"status": "failure"}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( DOMAIN, @@ -845,7 +845,7 @@ async def test_climate_pure_boost( ), patch( "homeassistant.components.sensibo.util.SensiboClient.async_set_pureboost", ), pytest.raises( - MultipleInvalid + MultipleInvalid, ): await hass.services.async_call( DOMAIN, @@ -947,7 +947,7 @@ async def test_climate_climate_react( ), patch( "homeassistant.components.sensibo.util.SensiboClient.async_set_climate_react", ), pytest.raises( - MultipleInvalid + MultipleInvalid, ): await hass.services.async_call( DOMAIN, @@ -1254,7 +1254,7 @@ async def test_climate_full_ac_state( ), patch( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_states", ), pytest.raises( - MultipleInvalid + MultipleInvalid, ): await hass.services.async_call( DOMAIN, diff --git a/tests/components/sensibo/test_select.py b/tests/components/sensibo/test_select.py index 
7d8e3731415..41a67dfbe79 100644 --- a/tests/components/sensibo/test_select.py +++ b/tests/components/sensibo/test_select.py @@ -90,7 +90,7 @@ async def test_select_set_option( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": {"status": "failed"}}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( SELECT_DOMAIN, @@ -132,7 +132,7 @@ async def test_select_set_option( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": {"status": "Failed", "failureReason": "No connection"}}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( SELECT_DOMAIN, diff --git a/tests/components/sensibo/test_switch.py b/tests/components/sensibo/test_switch.py index c6d47ceed66..e319be85c73 100644 --- a/tests/components/sensibo/test_switch.py +++ b/tests/components/sensibo/test_switch.py @@ -196,7 +196,7 @@ async def test_switch_command_failure( "homeassistant.components.sensibo.util.SensiboClient.async_set_timer", return_value={"status": "failure"}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( SWITCH_DOMAIN, @@ -214,7 +214,7 @@ async def test_switch_command_failure( "homeassistant.components.sensibo.util.SensiboClient.async_del_timer", return_value={"status": "failure"}, ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 464118ac99b..0384e9255a3 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -7,6 +7,7 @@ from datetime import timedelta from typing import Any from unittest.mock import Mock +from aioshelly.const import MODEL_25 from freezegun.api import FrozenDateTimeFactory import pytest @@ -30,7 +31,7 @@ MOCK_MAC = "123456789ABC" async def 
init_integration( hass: HomeAssistant, gen: int, - model="SHSW-25", + model=MODEL_25, sleep_period=0, options: dict[str, Any] | None = None, skip_setup: bool = False, diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 438ca9b5ace..6eb74e26dcb 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -4,6 +4,7 @@ from __future__ import annotations from unittest.mock import AsyncMock, Mock, PropertyMock, patch from aioshelly.block_device import BlockDevice, BlockUpdateType +from aioshelly.const import MODEL_1, MODEL_25, MODEL_PLUS_2PM from aioshelly.rpc_device import RpcDevice, RpcUpdateType import pytest @@ -22,7 +23,7 @@ MOCK_SETTINGS = { "device": { "mac": MOCK_MAC, "hostname": "test-host", - "type": "SHSW-25", + "type": MODEL_25, "num_outputs": 2, }, "coiot": {"update_period": 15}, @@ -148,6 +149,11 @@ MOCK_CONFIG = { "light:0": {"name": "test light_0"}, "switch:0": {"name": "test switch_0"}, "cover:0": {"name": "test cover_0"}, + "thermostat:0": { + "id": 0, + "enable": True, + "type": "heating", + }, "sys": { "ui_data": {}, "device": {"name": "Test name"}, @@ -166,7 +172,7 @@ MOCK_SHELLY_RPC = { "name": "Test Gen2", "id": "shellyplus2pm-123456789abc", "mac": MOCK_MAC, - "model": "SNSW-002P16EU", + "model": MODEL_PLUS_2PM, "gen": 2, "fw_id": "20220830-130540/0.11.0-gfa1bc37", "ver": "0.11.0", @@ -174,6 +180,7 @@ MOCK_SHELLY_RPC = { "auth_en": False, "auth_domain": None, "profile": "cover", + "relay_in_thermostat": True, } MOCK_STATUS_COAP = { @@ -207,6 +214,13 @@ MOCK_STATUS_RPC = { "em1:1": {"act_power": 123.3}, "em1data:0": {"total_act_energy": 123456.4}, "em1data:1": {"total_act_energy": 987654.3}, + "thermostat:0": { + "id": 0, + "enable": True, + "target_C": 23, + "current_C": 12.3, + "output": True, + }, "sys": { "available_updates": { "beta": {"version": "some_beta_version"}, @@ -280,7 +294,8 @@ async def mock_block_device(): status=MOCK_STATUS_COAP, firmware_version="some fw 
string", initialized=True, - model="SHSW-1", + model=MODEL_1, + gen=1, ) type(device).name = PropertyMock(return_value="Test name") block_device_mock.return_value = device diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 8905ff5c3e8..8a5e0108ad7 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -1,4 +1,5 @@ """Tests for Shelly binary sensor platform.""" +from aioshelly.const import MODEL_MOTION from freezegun.api import FrozenDateTimeFactory from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN @@ -77,9 +78,9 @@ async def test_block_rest_binary_sensor_connected_battery_devices( """Test block REST binary sensor for connected battery devices.""" entity_id = register_entity(hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud") monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHMOS-01") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_MOTION) monkeypatch.setitem(mock_block_device.settings["coiot"], "update_period", 3600) - await init_integration(hass, 1, model="SHMOS-01") + await init_integration(hass, 1, model=MODEL_MOTION) assert hass.states.get(entity_id).state == STATE_OFF diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index 08ec548d3f0..fe518b8509c 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -1,10 +1,14 @@ """Tests for Shelly climate platform.""" +from copy import deepcopy from unittest.mock import AsyncMock, PropertyMock +from aioshelly.const import MODEL_VALVE from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_PRESET_MODE, 
ATTR_TARGET_TEMP_HIGH, @@ -14,13 +18,15 @@ from homeassistant.components.climate import ( SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_TEMPERATURE, + HVACAction, HVACMode, ) -from homeassistant.components.shelly.const import DOMAIN +from homeassistant.components.shelly.const import DOMAIN, MODEL_WALL_DISPLAY from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er import homeassistant.helpers.issue_registry as ir from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -49,7 +55,7 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") monkeypatch.delattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "targetTemp") - await init_integration(hass, 1, sleep_period=1000, model="SHTRV-01") + await init_integration(hass, 1, sleep_period=1000, model=MODEL_VALVE) # Make device online mock_block_device.mock_update() @@ -150,7 +156,7 @@ async def test_climate_set_preset_mode( monkeypatch.delattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", None) - await init_integration(hass, 1, sleep_period=1000, model="SHTRV-01") + await init_integration(hass, 1, sleep_period=1000, model=MODEL_VALVE) # Make device online mock_block_device.mock_update() @@ -502,7 +508,7 @@ async def test_device_not_calibrated( """Test to create an issue when the device is not calibrated.""" issue_registry: ir.IssueRegistry = ir.async_get(hass) - await init_integration(hass, 1, sleep_period=1000, model="SHTRV-01") + await 
init_integration(hass, 1, sleep_period=1000, model=MODEL_VALVE) # Make device online mock_block_device.mock_update() @@ -534,3 +540,97 @@ async def test_device_not_calibrated( assert not issue_registry.async_get_issue( domain=DOMAIN, issue_id=f"not_calibrated_{MOCK_MAC}" ) + + +async def test_rpc_climate_hvac_mode( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device, + monkeypatch, +) -> None: + """Test climate hvac mode service.""" + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + state = hass.states.get(ENTITY_ID) + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_TEMPERATURE] == 23 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + + entry = entity_registry.async_get(ENTITY_ID) + assert entry + assert entry.unique_id == "123456789ABC-thermostat:0" + + monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) + mock_rpc_device.mock_update() + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + + monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "enable", False) + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + mock_rpc_device.mock_update() + + mock_rpc_device.call_rpc.assert_called_once_with( + "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} + ) + state = hass.states.get(ENTITY_ID) + assert state.state == HVACMode.OFF + + +async def test_rpc_climate_set_temperature( + hass: HomeAssistant, mock_rpc_device, monkeypatch +) -> None: + """Test climate set target temperature.""" + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_TEMPERATURE] == 23 + + # test set temperature without target temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + 
SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TARGET_TEMP_LOW: 20, + ATTR_TARGET_TEMP_HIGH: 30, + }, + blocking=True, + ) + mock_rpc_device.call_rpc.assert_not_called() + + monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 28}, + blocking=True, + ) + mock_rpc_device.mock_update() + + mock_rpc_device.call_rpc.assert_called_once_with( + "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} + ) + state = hass.states.get(ENTITY_ID) + assert state.attributes[ATTR_TEMPERATURE] == 28 + + +async def test_rpc_climate_hvac_mode_cool( + hass: HomeAssistant, mock_rpc_device, monkeypatch +) -> None: + """Test climate with hvac mode cooling.""" + new_config = deepcopy(mock_rpc_device.config) + new_config["thermostat:0"]["type"] = "cooling" + monkeypatch.setattr(mock_rpc_device, "config", new_config) + + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + state = hass.states.get(ENTITY_ID) + assert state.state == HVACMode.COOL + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 073847e0308..9482080a1a3 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -5,6 +5,7 @@ from dataclasses import replace from ipaddress import ip_address from unittest.mock import AsyncMock, patch +from aioshelly.const import MODEL_1, MODEL_PLUS_2PM from aioshelly.exceptions import ( DeviceConnectionError, FirmwareUnsupported, @@ -52,8 +53,8 @@ DISCOVERY_INFO_WITH_MAC = zeroconf.ZeroconfServiceInfo( @pytest.mark.parametrize( ("gen", "model"), [ - (1, "SHSW-1"), - (2, "SNSW-002P16EU"), + (1, MODEL_1), + (2, MODEL_PLUS_2PM), ], ) async def test_form( @@ -68,7 +69,7 @@ async def test_form( with patch( 
"homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False, "gen": gen}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False, "gen": gen}, ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( @@ -98,13 +99,13 @@ async def test_form( [ ( 1, - "SHSW-1", + MODEL_1, {"username": "test user", "password": "test1 password"}, "test user", ), ( 2, - "SNSW-002P16EU", + MODEL_PLUS_2PM, {"password": "test2 password"}, "admin", ), @@ -128,7 +129,7 @@ async def test_form_auth( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True, "gen": gen}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -306,7 +307,7 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False}, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -339,7 +340,7 @@ async def test_user_setup_ignored_device( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False}, ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( @@ -456,13 +457,13 @@ async def test_form_auth_errors_test_connection_gen2( [ ( 1, - "SHSW-1", - {"mac": "test-mac", "type": "SHSW-1", "auth": False, "gen": 1}, + MODEL_1, + {"mac": "test-mac", "type": MODEL_1, "auth": False, "gen": 1}, ), ( 2, - "SNSW-002P16EU", - {"mac": "test-mac", "model": "SHSW-1", "auth": False, "gen": 2}, + MODEL_PLUS_2PM, + {"mac": 
"test-mac", "model": MODEL_PLUS_2PM, "auth": False, "gen": 2}, ), ], ) @@ -525,7 +526,7 @@ async def test_zeroconf_sleeping_device( "homeassistant.components.shelly.config_flow.get_info", return_value={ "mac": "test-mac", - "type": "SHSW-1", + "type": MODEL_1, "auth": False, "sleep_mode": True, }, @@ -559,7 +560,7 @@ async def test_zeroconf_sleeping_device( assert result2["title"] == "Test name" assert result2["data"] == { "host": "1.1.1.1", - "model": "SHSW-1", + "model": MODEL_1, "sleep_period": 600, "gen": 1, } @@ -573,7 +574,7 @@ async def test_zeroconf_sleeping_device_error(hass: HomeAssistant) -> None: "homeassistant.components.shelly.config_flow.get_info", return_value={ "mac": "test-mac", - "type": "SHSW-1", + "type": MODEL_1, "auth": False, "sleep_mode": True, }, @@ -600,7 +601,7 @@ async def test_zeroconf_already_configured(hass: HomeAssistant) -> None: with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -627,7 +628,7 @@ async def test_zeroconf_ignored(hass: HomeAssistant) -> None: with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -648,7 +649,7 @@ async def test_zeroconf_with_wifi_ap_ip(hass: HomeAssistant) -> None: with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -700,7 +701,7 @@ async def test_zeroconf_require_auth(hass: HomeAssistant, mock_block_device) -> with patch( 
"homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": True}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -726,7 +727,7 @@ async def test_zeroconf_require_auth(hass: HomeAssistant, mock_block_device) -> assert result2["title"] == "Test name" assert result2["data"] == { "host": "1.1.1.1", - "model": "SHSW-1", + "model": MODEL_1, "sleep_period": 0, "gen": 1, "username": "test username", @@ -754,7 +755,7 @@ async def test_reauth_successful( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True, "gen": gen}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -790,7 +791,7 @@ async def test_reauth_unsuccessful(hass: HomeAssistant, gen, user_input) -> None with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True, "gen": gen}, + return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ), patch( "aioshelly.block_device.BlockDevice.create", new=AsyncMock(side_effect=InvalidAuthError), @@ -1029,7 +1030,7 @@ async def test_zeroconf_already_configured_triggers_refresh_mac_in_name( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": "SHSW-1"}, + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": MODEL_1}, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1038,7 +1039,7 @@ async def test_zeroconf_already_configured_triggers_refresh_mac_in_name( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "", "type": "SHSW-1", "auth": False}, + return_value={"mac": "", "type": MODEL_1, "auth": False}, ): result 
= await hass.config_entries.flow.async_init( DOMAIN, @@ -1061,7 +1062,7 @@ async def test_zeroconf_already_configured_triggers_refresh( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": "SHSW-1"}, + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": MODEL_1}, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1070,7 +1071,7 @@ async def test_zeroconf_already_configured_triggers_refresh( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "AABBCCDDEEFF", "type": "SHSW-1", "auth": False}, + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -1093,7 +1094,7 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": "SHSW-1"}, + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1105,7 +1106,7 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "AABBCCDDEEFF", "type": "SHSW-1", "auth": False}, + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -1148,7 +1149,7 @@ async def test_sleeping_device_gen2_with_new_firmware( assert result["data"] == { "host": "1.1.1.1", - "model": "SNSW-002P16EU", + "model": MODEL_PLUS_2PM, "sleep_period": 666, "gen": 2, } diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 8ce80b70032..e73168c6b20 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ 
-2,6 +2,7 @@ from datetime import timedelta from unittest.mock import AsyncMock, patch +from aioshelly.const import MODEL_BULB, MODEL_BUTTON1 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from freezegun.api import FrozenDateTimeFactory @@ -79,7 +80,7 @@ async def test_block_no_reload_on_bulb_changes( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_block_device, monkeypatch ) -> None: """Test block no reload on bulb mode/effect change.""" - await init_integration(hass, 1, model="SHBLB-1") + await init_integration(hass, 1, model=MODEL_BULB) monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "cfgChanged", 1) mock_block_device.mock_update() @@ -284,7 +285,7 @@ async def test_block_button_click_event( "sensor_ids", {"inputEvent": "S", "inputEventCnt": 0}, ) - entry = await init_integration(hass, 1, model="SHBTN-1", sleep_period=1000) + entry = await init_integration(hass, 1, model=MODEL_BUTTON1, sleep_period=1000) # Make device online mock_block_device.mock_update() diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index 143501ef620..9a63e66980a 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -1,4 +1,5 @@ """The tests for Shelly device triggers.""" +from aioshelly.const import MODEL_BUTTON1 import pytest from pytest_unordered import unordered @@ -108,7 +109,7 @@ async def test_get_triggers_rpc_device(hass: HomeAssistant, mock_rpc_device) -> async def test_get_triggers_button(hass: HomeAssistant, mock_block_device) -> None: """Test we get the expected triggers from a shelly button.""" - entry = await init_integration(hass, 1, model="SHBTN-1") + entry = await init_integration(hass, 1, model=MODEL_BUTTON1) dev_reg = async_get_dev_reg(hass) device = async_entries_for_config_entry(dev_reg, entry.entry_id)[0] diff --git a/tests/components/shelly/test_diagnostics.py 
b/tests/components/shelly/test_diagnostics.py index 39f1ef8d723..13126db0a0e 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import ANY from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT +from aioshelly.const import MODEL_25 from homeassistant.components.diagnostics import REDACTED from homeassistant.components.shelly.const import ( @@ -40,7 +41,7 @@ async def test_block_config_entry_diagnostics( "bluetooth": "not initialized", "device_info": { "name": "Test name", - "model": "SHSW-25", + "model": MODEL_25, "sw_version": "some fw string", }, "device_settings": {"coiot": {"update_period": 15}}, @@ -136,7 +137,7 @@ async def test_rpc_config_entry_diagnostics( }, "device_info": { "name": "Test name", - "model": "SHSW-25", + "model": MODEL_25, "sw_version": "some fw string", }, "device_settings": {}, diff --git a/tests/components/shelly/test_event.py b/tests/components/shelly/test_event.py index b7824d8d7ac..09439adc6f7 100644 --- a/tests/components/shelly/test_event.py +++ b/tests/components/shelly/test_event.py @@ -1,6 +1,7 @@ """Tests for Shelly button platform.""" from __future__ import annotations +from aioshelly.const import MODEL_I3 from pytest_unordered import unordered from homeassistant.components.event import ( @@ -104,7 +105,7 @@ async def test_block_event(hass: HomeAssistant, monkeypatch, mock_block_device) async def test_block_event_shix3_1(hass: HomeAssistant, mock_block_device) -> None: """Test block device event for SHIX3-1.""" - await init_integration(hass, 1, model="SHIX3-1") + await init_integration(hass, 1, model=MODEL_I3) entity_id = "event.test_name_channel_1" state = hass.states.get(entity_id) diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 69d0fccf421..e3aea966230 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -1,4 +1,13 @@ """Tests for Shelly light 
platform.""" +from aioshelly.const import ( + MODEL_BULB, + MODEL_BULB_RGBW, + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_DUO, + MODEL_RGBW2, + MODEL_VINTAGE_V2, +) import pytest from homeassistant.components.light import ( @@ -33,7 +42,7 @@ LIGHT_BLOCK_ID = 2 async def test_block_device_rgbw_bulb(hass: HomeAssistant, mock_block_device) -> None: """Test block device RGBW bulb.""" - await init_integration(hass, 1, model="SHBLB-1") + await init_integration(hass, 1, model=MODEL_BULB) # Test initial state = hass.states.get("light.test_name_channel_1") @@ -113,7 +122,7 @@ async def test_block_device_rgb_bulb( ) -> None: """Test block device RGB bulb.""" monkeypatch.delattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "mode") - await init_integration(hass, 1, model="SHCB-1") + await init_integration(hass, 1, model=MODEL_BULB_RGBW) # Test initial state = hass.states.get("light.test_name_channel_1") @@ -215,7 +224,7 @@ async def test_block_device_white_bulb( monkeypatch.delattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "mode") monkeypatch.delattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "colorTemp") monkeypatch.delattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "effect") - await init_integration(hass, 1, model="SHVIN-1") + await init_integration(hass, 1, model=MODEL_VINTAGE_V2) # Test initial state = hass.states.get("light.test_name_channel_1") @@ -259,12 +268,12 @@ async def test_block_device_white_bulb( @pytest.mark.parametrize( "model", [ - "SHBDUO-1", - "SHCB-1", - "SHDM-1", - "SHDM-2", - "SHRGBW2", - "SHVIN-1", + MODEL_DUO, + MODEL_BULB_RGBW, + MODEL_DIMMER, + MODEL_DIMMER_2, + MODEL_RGBW2, + MODEL_VINTAGE_V2, ], ) async def test_block_device_support_transition( diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 115ad5edabb..e19416706e1 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -1,10 +1,13 @@ """Tests for Shelly switch platform.""" +from copy import deepcopy from 
unittest.mock import AsyncMock +from aioshelly.const import MODEL_GAS from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError import pytest -from homeassistant.components.shelly.const import DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN +from homeassistant.components.shelly.const import DOMAIN, MODEL_WALL_DISPLAY from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( @@ -19,7 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import init_integration +from . import init_integration, register_entity RELAY_BLOCK_ID = 0 GAS_VALVE_BLOCK_ID = 6 @@ -236,7 +239,7 @@ async def test_block_device_gas_valve( ) -> None: """Test block device Shelly Gas with Valve addon.""" registry = er.async_get(hass) - await init_integration(hass, 1, "SHGS-1") + await init_integration(hass, 1, MODEL_GAS) entity_id = "switch.test_name_valve" entry = registry.async_get(entity_id) @@ -277,3 +280,39 @@ async def test_block_device_gas_valve( assert state assert state.state == STATE_ON # valve is open assert state.attributes.get(ATTR_ICON) == "mdi:valve-open" + + +async def test_wall_display_thermostat_mode( + hass: HomeAssistant, + mock_rpc_device, +) -> None: + """Test Wall Display in thermostat mode.""" + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + # the switch entity should not be created, only the climate entity + assert hass.states.get("switch.test_name") is None + assert hass.states.get("climate.test_name") + + +async def test_wall_display_relay_mode( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device, + monkeypatch, +) -> None: + """Test Wall Display in thermostat mode.""" + entity_id = register_entity( + hass, + CLIMATE_DOMAIN, + "test_name", + 
"thermostat:0", + ) + + new_shelly = deepcopy(mock_rpc_device.shelly) + new_shelly["relay_in_thermostat"] = False + monkeypatch.setattr(mock_rpc_device, "shelly", new_shelly) + + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + # the climate entity should be removed + assert hass.states.get(entity_id) is None diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 454afb73ce1..06eac49e293 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -5,11 +5,16 @@ from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCal from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.shelly.const import DOMAIN +from homeassistant.components.shelly.const import ( + DOMAIN, + GEN1_RELEASE_URL, + GEN2_RELEASE_URL, +) from homeassistant.components.update import ( ATTR_IN_PROGRESS, ATTR_INSTALLED_VERSION, ATTR_LATEST_VERSION, + ATTR_RELEASE_URL, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateEntityFeature, @@ -75,6 +80,7 @@ async def test_block_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is True + assert state.attributes[ATTR_RELEASE_URL] == GEN1_RELEASE_URL monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2") await mock_rest_update(hass, freezer) @@ -117,6 +123,7 @@ async def test_block_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_RELEASE_URL] is None await hass.services.async_call( UPDATE_DOMAIN, @@ -270,6 +277,7 @@ async def test_rpc_update(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert 
state.attributes[ATTR_IN_PROGRESS] == 0 + assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL inject_rpc_device_event( monkeypatch, @@ -341,6 +349,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) + assert state.attributes[ATTR_RELEASE_URL] == GEN2_RELEASE_URL monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() @@ -467,6 +476,7 @@ async def test_rpc_beta_update( assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" assert state.attributes[ATTR_IN_PROGRESS] is False + assert state.attributes[ATTR_RELEASE_URL] is None monkeypatch.setitem( mock_rpc_device.status["sys"], diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 3d273ff3059..e47f9e451b4 100644 --- a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -1,12 +1,26 @@ """Tests for Shelly utils.""" +from aioshelly.const import ( + MODEL_1, + MODEL_1L, + MODEL_BUTTON1, + MODEL_BUTTON1_V2, + MODEL_DIMMER_2, + MODEL_EM3, + MODEL_I3, + MODEL_MOTION, + MODEL_PLUS_2PM_V2, + MODEL_WALL_DISPLAY, +) import pytest +from homeassistant.components.shelly.const import GEN1_RELEASE_URL, GEN2_RELEASE_URL from homeassistant.components.shelly.utils import ( get_block_channel_name, get_block_device_sleep_period, get_block_input_triggers, get_device_uptime, get_number_of_channels, + get_release_url, get_rpc_channel_name, get_rpc_input_triggers, is_block_momentary_input, @@ -39,7 +53,7 @@ async def test_block_get_number_of_channels(mock_block_device, monkeypatch) -> N == 4 ) - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHDM-2") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_DIMMER_2) assert ( get_number_of_channels( mock_block_device, @@ -61,7 
+75,7 @@ async def test_block_get_block_channel_name(mock_block_device, monkeypatch) -> N == "Test name channel 1" ) - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHEM-3") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_EM3) assert ( get_block_channel_name( @@ -107,7 +121,7 @@ async def test_is_block_momentary_input(mock_block_device, monkeypatch) -> None: ) monkeypatch.setitem(mock_block_device.settings, "mode", "relay") - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHSW-L") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_1L) assert ( is_block_momentary_input( mock_block_device.settings, mock_block_device.blocks[DEVICE_BLOCK_ID], True @@ -125,7 +139,7 @@ async def test_is_block_momentary_input(mock_block_device, monkeypatch) -> None: is False ) - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHBTN-2") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_BUTTON1_V2) assert ( is_block_momentary_input( @@ -177,7 +191,7 @@ async def test_get_block_input_triggers(mock_block_device, monkeypatch) -> None: ) ) == {("long", "button"), ("single", "button")} - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHBTN-1") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_BUTTON1) assert set( get_block_input_triggers( mock_block_device, mock_block_device.blocks[DEVICE_BLOCK_ID] @@ -189,7 +203,7 @@ async def test_get_block_input_triggers(mock_block_device, monkeypatch) -> None: ("triple", "button"), } - monkeypatch.setitem(mock_block_device.settings["device"], "type", "SHIX3-1") + monkeypatch.setitem(mock_block_device.settings["device"], "type", MODEL_I3) assert set( get_block_input_triggers( mock_block_device, mock_block_device.blocks[DEVICE_BLOCK_ID] @@ -224,3 +238,23 @@ async def test_get_rpc_input_triggers(mock_rpc_device, monkeypatch) -> None: monkeypatch.setattr(mock_rpc_device, "config", 
{"input:0": {"type": "switch"}}) assert not get_rpc_input_triggers(mock_rpc_device) + + +@pytest.mark.parametrize( + ("gen", "model", "beta", "expected"), + [ + (1, MODEL_MOTION, False, None), + (1, MODEL_1, False, GEN1_RELEASE_URL), + (1, MODEL_1, True, None), + (2, MODEL_WALL_DISPLAY, False, None), + (2, MODEL_PLUS_2PM_V2, False, GEN2_RELEASE_URL), + (2, MODEL_PLUS_2PM_V2, True, None), + ], +) +def test_get_release_url( + gen: int, model: str, beta: bool, expected: str | None +) -> None: + """Test get_release_url() with a device without a release note URL.""" + result = get_release_url(gen, model, beta) + + assert result is expected diff --git a/tests/components/shopping_list/test_todo.py b/tests/components/shopping_list/test_todo.py index 681ccea60ac..7722bd8b6da 100644 --- a/tests/components/shopping_list/test_todo.py +++ b/tests/components/shopping_list/test_todo.py @@ -13,39 +13,22 @@ from tests.typing import WebSocketGenerator TEST_ENTITY = "todo.shopping_list" -@pytest.fixture -def ws_req_id() -> Callable[[], int]: - """Fixture for incremental websocket requests.""" - - id = 0 - - def next() -> int: - nonlocal id - id += 1 - return id - - return next - - @pytest.fixture async def ws_get_items( - hass_ws_client: WebSocketGenerator, ws_req_id: Callable[[], int] + hass_ws_client: WebSocketGenerator, ) -> Callable[[], Awaitable[dict[str, str]]]: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: # Fetch items using To-do platform client = await hass_ws_client() - id = ws_req_id() - await client.send_json( + await client.send_json_auto_id( { - "id": id, "type": "todo/item/list", "entity_id": TEST_ENTITY, } ) resp = await client.receive_json() - assert resp.get("id") == id assert resp.get("success") return resp.get("result", {}).get("items", []) @@ -55,25 +38,21 @@ async def ws_get_items( @pytest.fixture async def ws_move_item( hass_ws_client: WebSocketGenerator, - ws_req_id: Callable[[], int], ) -> Callable[[str, str | 
None], Awaitable[None]]: """Fixture to move an item in the todo list.""" async def move(uid: str, previous_uid: str | None) -> dict[str, Any]: # Fetch items using To-do platform client = await hass_ws_client() - id = ws_req_id() data = { - "id": id, "type": "todo/item/move", "entity_id": TEST_ENTITY, "uid": uid, } if previous_uid is not None: data["previous_uid"] = previous_uid - await client.send_json(data) + await client.send_json_auto_id(data) resp = await client.receive_json() - assert resp.get("id") == id return resp return move @@ -83,7 +62,6 @@ async def test_get_items( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test creating a shopping list item with the WS API and verifying with To-do API.""" @@ -94,9 +72,7 @@ async def test_get_items( assert state.state == "0" # Native shopping list websocket - await client.send_json( - {"id": ws_req_id(), "type": "shopping_list/items/add", "name": "soda"} - ) + await client.send_json_auto_id({"type": "shopping_list/items/add", "name": "soda"}) msg = await client.receive_json() assert msg["success"] is True data = msg["result"] @@ -117,7 +93,6 @@ async def test_get_items( async def test_add_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test adding shopping_list item and listing it.""" @@ -145,7 +120,6 @@ async def test_add_item( async def test_remove_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test removing a todo item.""" @@ -187,7 +161,6 @@ async def test_remove_item( async def test_bulk_remove( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test removing a todo item.""" @@ -232,7 +205,6 @@ async def 
test_bulk_remove( async def test_update_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item.""" @@ -286,7 +258,6 @@ async def test_update_item( async def test_partial_update_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item with partial information.""" @@ -363,7 +334,6 @@ async def test_partial_update_item( async def test_update_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ) -> None: """Test updating a todo item that does not exist.""" @@ -410,7 +380,6 @@ async def test_update_invalid_item( async def test_move_item( hass: HomeAssistant, sl_setup: None, - ws_req_id: Callable[[], int], ws_get_items: Callable[[], Awaitable[dict[str, str]]], ws_move_item: Callable[[str, str | None], Awaitable[dict[str, Any]]], src_idx: int, @@ -475,3 +444,69 @@ async def test_move_invalid_item( assert not resp.get("success") assert resp.get("error", {}).get("code") == "failed" assert "could not be re-ordered" in resp.get("error", {}).get("message") + + +async def test_subscribe_item( + hass: HomeAssistant, + sl_setup: None, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test updating a todo item.""" + + # Create new item + await hass.services.async_call( + TODO_DOMAIN, + "add_item", + { + "item": "soda", + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Subscribe and get the initial list + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": TEST_ENTITY, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert 
msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "soda" + assert items[0]["status"] == "needs_action" + uid = items[0]["uid"] + assert uid + + # Rename item item completed + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + { + "item": "soda", + "rename": "milk", + }, + target={"entity_id": TEST_ENTITY}, + blocking=True, + ) + + # Verify update is published + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "milk" + assert items[0]["status"] == "needs_action" + assert "uid" in items[0] diff --git a/tests/components/simplisafe/conftest.py b/tests/components/simplisafe/conftest.py index 4b8686d7a7f..1b9f9f02cee 100644 --- a/tests/components/simplisafe/conftest.py +++ b/tests/components/simplisafe/conftest.py @@ -106,7 +106,8 @@ async def setup_simplisafe_fixture(hass, api, config): ), patch( "homeassistant.components.simplisafe.SimpliSafe._async_start_websocket_loop" ), patch( - "homeassistant.components.simplisafe.PLATFORMS", [] + "homeassistant.components.simplisafe.PLATFORMS", + [], ): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/simplisafe/test_init.py b/tests/components/simplisafe/test_init.py index 617b77f7c98..cc7b2b8d2b6 100644 --- a/tests/components/simplisafe/test_init.py +++ b/tests/components/simplisafe/test_init.py @@ -34,7 +34,8 @@ async def test_base_station_migration( ), patch( "homeassistant.components.simplisafe.SimpliSafe._async_start_websocket_loop" ), patch( - "homeassistant.components.simplisafe.PLATFORMS", [] + "homeassistant.components.simplisafe.PLATFORMS", + [], ): assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() diff --git a/tests/components/skybell/__init__.py 
b/tests/components/skybell/__init__.py index fc049adcc3d..ae9b6d132e4 100644 --- a/tests/components/skybell/__init__.py +++ b/tests/components/skybell/__init__.py @@ -1,12 +1 @@ """Tests for the SkyBell integration.""" - -from homeassistant.const import CONF_EMAIL, CONF_PASSWORD - -USERNAME = "user" -PASSWORD = "password" -USER_ID = "123456789012345678901234" - -CONF_CONFIG_FLOW = { - CONF_EMAIL: USERNAME, - CONF_PASSWORD: PASSWORD, -} diff --git a/tests/components/skybell/conftest.py b/tests/components/skybell/conftest.py index 4318fa8c24f..beb3fec9b98 100644 --- a/tests/components/skybell/conftest.py +++ b/tests/components/skybell/conftest.py @@ -1,11 +1,28 @@ -"""Test setup for the SkyBell integration.""" - +"""Configure pytest for Skybell tests.""" from unittest.mock import AsyncMock, patch from aioskybell import Skybell, SkybellDevice +from aioskybell.helpers.const import BASE_URL, USERS_ME_URL +import orjson import pytest -from . import USER_ID +from homeassistant.components.skybell.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker + +USERNAME = "user" +PASSWORD = "password" +USER_ID = "1234567890abcdef12345678" +DEVICE_ID = "012345670123456789abcdef" + +CONF_DATA = { + CONF_EMAIL: USERNAME, + CONF_PASSWORD: PASSWORD, +} @pytest.fixture(autouse=True) @@ -23,3 +40,88 @@ def skybell_mock(): return_value=mocked_skybell, ), patch("homeassistant.components.skybell.Skybell", return_value=mocked_skybell): yield mocked_skybell + + +def create_entry(hass: HomeAssistant) -> MockConfigEntry: + """Create fixture for adding config entry in Home Assistant.""" + entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) + entry.add_to_hass(hass) + return entry + + +async def 
set_aioclient_responses(aioclient_mock: AiohttpClientMocker) -> None: + """Set AioClient responses.""" + aioclient_mock.get( + f"{BASE_URL}devices/{DEVICE_ID}/info/", + text=load_fixture("skybell/device_info.json"), + ) + aioclient_mock.get( + f"{BASE_URL}devices/{DEVICE_ID}/settings/", + text=load_fixture("skybell/device_settings.json"), + ) + aioclient_mock.get( + f"{BASE_URL}devices/{DEVICE_ID}/activities/", + text=load_fixture("skybell/activities.json"), + ) + aioclient_mock.get( + f"{BASE_URL}devices/", + text=load_fixture("skybell/device.json"), + ) + aioclient_mock.get( + USERS_ME_URL, + text=load_fixture("skybell/me.json"), + ) + aioclient_mock.post( + f"{BASE_URL}login/", + text=load_fixture("skybell/login.json"), + ) + aioclient_mock.get( + f"{BASE_URL}devices/{DEVICE_ID}/activities/1234567890ab1234567890ac/video/", + text=load_fixture("skybell/video.json"), + ) + aioclient_mock.get( + f"{BASE_URL}devices/{DEVICE_ID}/avatar/", + text=load_fixture("skybell/avatar.json"), + ) + aioclient_mock.get( + f"https://v3-production-devices-avatar.s3.us-west-2.amazonaws.com/{DEVICE_ID}.jpg", + ) + aioclient_mock.get( + f"https://skybell-thumbnails-stage.s3.amazonaws.com/{DEVICE_ID}/1646859244793-951{DEVICE_ID}_{DEVICE_ID}.jpeg", + ) + + +@pytest.fixture +async def connection(aioclient_mock: AiohttpClientMocker) -> None: + """Fixture for good connection responses.""" + await set_aioclient_responses(aioclient_mock) + + +def create_skybell(hass: HomeAssistant) -> Skybell: + """Create Skybell object.""" + skybell = Skybell( + username=USERNAME, + password=PASSWORD, + get_devices=True, + session=async_get_clientsession(hass), + ) + skybell._cache = orjson.loads(load_fixture("skybell/cache.json")) + return skybell + + +def mock_skybell(hass: HomeAssistant): + """Mock Skybell object.""" + return patch( + "homeassistant.components.skybell.Skybell", return_value=create_skybell(hass) + ) + + +async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: + """Set up 
the Skybell integration in Home Assistant.""" + config_entry = create_entry(hass) + + with mock_skybell(hass), patch("aioskybell.utils.async_save_cache"): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/skybell/fixtures/activities.json b/tests/components/skybell/fixtures/activities.json new file mode 100644 index 00000000000..4ed5c027821 --- /dev/null +++ b/tests/components/skybell/fixtures/activities.json @@ -0,0 +1,30 @@ +[ + { + "videoState": "download:ready", + "_id": "1234567890ab1234567890ab", + "device": "0123456789abcdef01234567", + "callId": "1234567890123-1234567890abcd1234567890abcd", + "event": "device:sensor:motion", + "state": "ready", + "ttlStartDate": "2020-03-30T12:35:02.204Z", + "createdAt": "2020-03-30T12:35:02.204Z", + "updatedAt": "2020-03-30T12:35:02.566Z", + "id": "1234567890ab1234567890ab", + "media": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcdef.jpeg", + "mediaSmall": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcdef_small.jpeg" + }, + { + "videoState": "download:ready", + "_id": "1234567890ab1234567890a9", + "device": "0123456789abcdef01234567", + "callId": "1234567890123-1234567890abcd1234567890abc9", + "event": "application:on-demand", + "state": "ready", + "ttlStartDate": "2020-03-30T11:35:02.204Z", + "createdAt": "2020-03-30T11:35:02.204Z", + "updatedAt": "2020-03-30T11:35:02.566Z", + "id": "1234567890ab1234567890a9", + "media": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcde9.jpeg", + "mediaSmall": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcde9_small.jpeg" + 
} +] diff --git a/tests/components/skybell/fixtures/avatar.json b/tests/components/skybell/fixtures/avatar.json new file mode 100644 index 00000000000..3f8157c15c8 --- /dev/null +++ b/tests/components/skybell/fixtures/avatar.json @@ -0,0 +1,4 @@ +{ + "createdAt": "2020-03-31T04:13:48.640Z", + "url": "https://v3-production-devices-avatar.s3.us-west-2.amazonaws.com/012345670123456789abcdef.jpg" +} diff --git a/tests/components/skybell/fixtures/cache.json b/tests/components/skybell/fixtures/cache.json new file mode 100644 index 00000000000..1276c2cfc0f --- /dev/null +++ b/tests/components/skybell/fixtures/cache.json @@ -0,0 +1,40 @@ +{ + "app_id": "secret", + "client_id": "secret", + "token": "secret", + "access_token": "secret", + "devices": { + "5f8ef594362f31000833d959": { + "event": { + "device:sensor:motion": { + "videoState": "download:ready", + "_id": "1234567890ab1234567890ab", + "device": "0123456789abcdef01234567", + "callId": "1234567890123-1234567890abcd1234567890abcd", + "event": "device:sensor:motion", + "state": "ready", + "ttlStartDate": "2020-03-30T12:35:02.204Z", + "createdAt": "2020-03-30T12:35:02.204Z", + "updatedAt": "2020-03-30T12:35:02.566Z", + "id": "1234567890ab1234567890ab", + "media": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcdef.jpeg", + "mediaSmall": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcdef_small.jpeg" + }, + "device:sensor:button": { + "videoState": "download:ready", + "_id": "1234567890ab1234567890a9", + "device": "0123456789abcdef01234567", + "callId": "1234567890123-1234567890abcd1234567890abc9", + "event": "application:on-demand", + "state": "ready", + "ttlStartDate": "2020-03-30T11:35:02.204Z", + "createdAt": "2020-03-30T11:35:02.204Z", + "updatedAt": "2020-03-30T11:35:02.566Z", + "id": "1234567890ab1234567890a9", + "media": 
"https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcde9.jpeg", + "mediaSmall": "https://skybell-thumbnails-stage.s3.amazonaws.com/012345670123456789abcdef/1646859244793-951012345670123456789abcdef_012345670123456789abcde9_small.jpeg" + } + } + } + } +} diff --git a/tests/components/skybell/fixtures/device.json b/tests/components/skybell/fixtures/device.json new file mode 100644 index 00000000000..7b522aa687d --- /dev/null +++ b/tests/components/skybell/fixtures/device.json @@ -0,0 +1,19 @@ +[ + { + "user": "0123456789abcdef01234567", + "uuid": "0123456789", + "resourceId": "012345670123456789abcdef", + "deviceInviteToken": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "location": { + "lat": "-1.0", + "lng": "1.0" + }, + "name": "Front Door", + "type": "skybell hd", + "status": "up", + "createdAt": "2020-10-20T14:35:00.745Z", + "updatedAt": "2020-10-20T14:35:00.745Z", + "id": "012345670123456789abcdef", + "acl": "owner" + } +] diff --git a/tests/components/skybell/fixtures/device_info.json b/tests/components/skybell/fixtures/device_info.json new file mode 100644 index 00000000000..d858bb20e36 --- /dev/null +++ b/tests/components/skybell/fixtures/device_info.json @@ -0,0 +1,25 @@ +{ + "essid": "wifi", + "wifiBitrate": "39", + "proxy_port": "5683", + "wifiLinkQuality": "43", + "port": "5683", + "mac": "ff:ff:ff:ff:ff:ff", + "serialNo": "0123456789", + "wifiTxPwrEeprom": "12", + "region": "us-west-2", + "hardwareRevision": "SKYBELL_TRIMPLUS_1000030-F", + "proxy_address": "34.209.204.201", + "wifiSignalLevel": "-67", + "localHostname": "ip-10-0-0-67.us-west-2.compute.internal", + "wifiNoise": "0", + "address": "1.2.3.4", + "clientId": "1234567890abcdef1234567890abcdef1234567890abcdef", + "timestamp": "60000000000", + "deviceId": "01234567890abcdef1234567", + "firmwareVersion": "7082", + "checkedInAt": "2020-03-31T04:13:37.000Z", + "status": { + "wifiLink": 
"poor" + } +} diff --git a/tests/components/skybell/fixtures/device_settings.json b/tests/components/skybell/fixtures/device_settings.json new file mode 100644 index 00000000000..46af5f0bd4b --- /dev/null +++ b/tests/components/skybell/fixtures/device_settings.json @@ -0,0 +1,22 @@ +{ + "ring_tone": "0", + "do_not_ring": "false", + "do_not_disturb": "false", + "digital_doorbell": "false", + "video_profile": "1", + "mic_volume": "63", + "speaker_volume": "96", + "chime_level": "1", + "motion_threshold": "32", + "low_lux_threshold": "50", + "med_lux_threshold": "150", + "high_lux_threshold": "400", + "low_front_led_dac": "10", + "med_front_led_dac": "10", + "high_front_led_dac": "10", + "green_r": "0", + "green_g": "0", + "green_b": "255", + "led_intensity": "0", + "motion_policy": "call" +} diff --git a/tests/components/skybell/fixtures/device_settings_change.json b/tests/components/skybell/fixtures/device_settings_change.json new file mode 100644 index 00000000000..6e2c8dd199b --- /dev/null +++ b/tests/components/skybell/fixtures/device_settings_change.json @@ -0,0 +1,22 @@ +{ + "ring_tone": "0", + "do_not_ring": "false", + "do_not_disturb": "false", + "digital_doorbell": "false", + "video_profile": "1", + "mic_volume": "63", + "speaker_volume": "96", + "chime_level": "1", + "motion_threshold": "32", + "low_lux_threshold": "50", + "med_lux_threshold": "150", + "high_lux_threshold": "400", + "low_front_led_dac": "10", + "med_front_led_dac": "10", + "high_front_led_dac": "10", + "green_r": "10", + "green_g": "125", + "green_b": "255", + "led_intensity": "50", + "motion_policy": "disabled" +} diff --git a/tests/components/skybell/fixtures/login.json b/tests/components/skybell/fixtures/login.json new file mode 100644 index 00000000000..c7eaa44b5ab --- /dev/null +++ b/tests/components/skybell/fixtures/login.json @@ -0,0 +1,10 @@ +{ + "firstName": "John", + "lastName": "Doe", + "resourceId": "0123456789abcdef01234567", + "createdAt": "2018-07-06T02:02:14.050Z", + 
"updatedAt": "2018-07-06T02:02:14.050Z", + "id": "0123456789abcdef01234567", + "userLinks": [], + "access_token": "superlongkey" +} diff --git a/tests/components/skybell/fixtures/login_401.json b/tests/components/skybell/fixtures/login_401.json new file mode 100644 index 00000000000..ab6bfd7053c --- /dev/null +++ b/tests/components/skybell/fixtures/login_401.json @@ -0,0 +1,5 @@ +{ + "errors": { + "message": "Invalid Login - SmartAuth" + } +} diff --git a/tests/components/skybell/fixtures/me.json b/tests/components/skybell/fixtures/me.json new file mode 100644 index 00000000000..7b27c95ec01 --- /dev/null +++ b/tests/components/skybell/fixtures/me.json @@ -0,0 +1,9 @@ +{ + "firstName": "First", + "lastName": "Last", + "resourceId": "123456789012345678901234", + "createdAt": "2018-10-06T02:02:14.050Z", + "updatedAt": "2018-10-06T02:02:14.050Z", + "id": "1234567890abcdef12345678", + "userLinks": [] +} diff --git a/tests/components/skybell/fixtures/video.json b/tests/components/skybell/fixtures/video.json new file mode 100644 index 00000000000..e674df1c9c8 --- /dev/null +++ b/tests/components/skybell/fixtures/video.json @@ -0,0 +1,3 @@ +{ + "url": "https://production-video-download.s3.us-west-2.amazonaws.com/012345670123456789abcdef/1654307756676-0123456789120123456789abcdef_012345670123456789abcdef.mp4?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=01234567890123456789%2F20203030%2Fus-west-2%2Fs3%2Faws4_request&X-Amz-Date=20200330T201225Z&X-Amz-Expires=300&X-Amz-Signature=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef&X-Amz-SignedHeaders=host" +} diff --git a/tests/components/skybell/test_binary_sensor.py b/tests/components/skybell/test_binary_sensor.py new file mode 100644 index 00000000000..8e0bc884730 --- /dev/null +++ b/tests/components/skybell/test_binary_sensor.py @@ -0,0 +1,18 @@ +"""Binary sensor tests for the Skybell integration.""" +from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.const 
import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant + +from .conftest import async_init_integration + + +async def test_binary_sensors(hass: HomeAssistant, connection) -> None: + """Test we get sensor data.""" + await async_init_integration(hass) + + state = hass.states.get("binary_sensor.front_door_button") + assert state.state == STATE_OFF + assert state.attributes.get(ATTR_DEVICE_CLASS) == BinarySensorDeviceClass.OCCUPANCY + state = hass.states.get("binary_sensor.front_door_motion") + assert state.state == STATE_ON + assert state.attributes.get(ATTR_DEVICE_CLASS) == BinarySensorDeviceClass.MOTION diff --git a/tests/components/skybell/test_config_flow.py b/tests/components/skybell/test_config_flow.py index f93c1d6ae4f..d83f4243d7f 100644 --- a/tests/components/skybell/test_config_flow.py +++ b/tests/components/skybell/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . 
import CONF_CONFIG_FLOW, PASSWORD, USER_ID +from .conftest import CONF_DATA, PASSWORD, USER_ID from tests.common import MockConfigEntry @@ -37,12 +37,12 @@ async def test_flow_user(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=CONF_CONFIG_FLOW, + user_input=CONF_DATA, ) assert result["type"] == FlowResultType.CREATE_ENTRY assert result["title"] == "user" - assert result["data"] == CONF_CONFIG_FLOW + assert result["data"] == CONF_DATA assert result["result"].unique_id == USER_ID @@ -50,12 +50,12 @@ async def test_flow_user_already_configured(hass: HomeAssistant) -> None: """Test user initialized flow with duplicate server.""" entry = MockConfigEntry( domain=DOMAIN, - data=CONF_CONFIG_FLOW, + data=CONF_DATA, ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=CONF_CONFIG_FLOW + DOMAIN, context={"source": SOURCE_USER}, data=CONF_DATA ) assert result["type"] == FlowResultType.ABORT @@ -66,7 +66,7 @@ async def test_flow_user_cannot_connect(hass: HomeAssistant, skybell_mock) -> No """Test user initialized flow with unreachable server.""" skybell_mock.async_initialize.side_effect = exceptions.SkybellException(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=CONF_CONFIG_FLOW + DOMAIN, context={"source": SOURCE_USER}, data=CONF_DATA ) assert result["type"] == FlowResultType.FORM assert result["step_id"] == "user" @@ -79,7 +79,7 @@ async def test_invalid_credentials(hass: HomeAssistant, skybell_mock) -> None: exceptions.SkybellAuthenticationException(hass) ) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=CONF_CONFIG_FLOW + DOMAIN, context={"source": SOURCE_USER}, data=CONF_DATA ) assert result["type"] == FlowResultType.FORM @@ -91,7 +91,7 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, skybell_mock) -> Non 
"""Test user initialized flow with unreachable server.""" skybell_mock.async_initialize.side_effect = Exception result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER}, data=CONF_CONFIG_FLOW + DOMAIN, context={"source": SOURCE_USER}, data=CONF_DATA ) assert result["type"] == FlowResultType.FORM assert result["step_id"] == "user" @@ -100,7 +100,7 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, skybell_mock) -> Non async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" - entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_CONFIG_FLOW) + entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( @@ -126,7 +126,7 @@ async def test_step_reauth(hass: HomeAssistant) -> None: async def test_step_reauth_failed(hass: HomeAssistant, skybell_mock) -> None: """Test the reauth flow fails and recovers.""" - entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_CONFIG_FLOW) + entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( diff --git a/tests/components/smappee/test_config_flow.py b/tests/components/smappee/test_config_flow.py index f6f5ab66708..8d4d7b8c3b2 100644 --- a/tests/components/smappee/test_config_flow.py +++ b/tests/components/smappee/test_config_flow.py @@ -146,9 +146,7 @@ async def test_user_local_connection_error(hass: HomeAssistant) -> None: "pysmappee.mqtt.SmappeeLocalMqtt.start_attempt", return_value=True ), patch("pysmappee.mqtt.SmappeeLocalMqtt.start", return_value=True), patch( "pysmappee.mqtt.SmappeeLocalMqtt.stop", return_value=True - ), patch( - "pysmappee.mqtt.SmappeeLocalMqtt.is_config_ready", return_value=None - ): + ), patch("pysmappee.mqtt.SmappeeLocalMqtt.is_config_ready", return_value=None): result = await hass.config_entries.flow.async_init( DOMAIN, 
context={"source": SOURCE_USER}, @@ -474,9 +472,7 @@ async def test_full_zeroconf_flow(hass: HomeAssistant) -> None: ), patch( "pysmappee.api.SmappeeLocalApi.load_instantaneous", return_value=[{"key": "phase0ActivePower", "value": 0}], - ), patch( - "homeassistant.components.smappee.async_setup_entry", return_value=True - ): + ), patch("homeassistant.components.smappee.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, @@ -516,9 +512,7 @@ async def test_full_user_local_flow(hass: HomeAssistant) -> None: ), patch( "pysmappee.api.SmappeeLocalApi.load_instantaneous", return_value=[{"key": "phase0ActivePower", "value": 0}], - ), patch( - "homeassistant.components.smappee.async_setup_entry", return_value=True - ): + ), patch("homeassistant.components.smappee.async_setup_entry", return_value=True): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, diff --git a/tests/components/smartthings/test_climate.py b/tests/components/smartthings/test_climate.py index ce875190efb..e74d69f04c9 100644 --- a/tests/components/smartthings/test_climate.py +++ b/tests/components/smartthings/test_climate.py @@ -15,16 +15,20 @@ from homeassistant.components.climate import ( ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_HVAC_MODES, + ATTR_PRESET_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ClimateEntityFeature, HVACAction, HVACMode, ) +from homeassistant.components.climate.const import ATTR_SWING_MODE from homeassistant.components.smartthings import climate from homeassistant.components.smartthings.const import DOMAIN from homeassistant.const import ( @@ -155,6 +159,7 @@ def air_conditioner_fixture(device_factory): Capability.switch, Capability.temperature_measurement, Capability.thermostat_cooling_setpoint, + 
Capability.fan_oscillation_mode, ], status={ Attribute.air_conditioner_mode: "auto", @@ -182,6 +187,14 @@ def air_conditioner_fixture(device_factory): ], Attribute.switch: "on", Attribute.cooling_setpoint: 23, + "supportedAcOptionalMode": ["windFree"], + Attribute.supported_fan_oscillation_modes: [ + "all", + "horizontal", + "vertical", + "fixed", + ], + Attribute.fan_oscillation_mode: "vertical", }, ) device.status.attributes[Attribute.temperature] = Status(24, "C", None) @@ -303,7 +316,10 @@ async def test_air_conditioner_entity_state( assert state.state == HVACMode.HEAT_COOL assert ( state.attributes[ATTR_SUPPORTED_FEATURES] - == ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + == ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE ) assert sorted(state.attributes[ATTR_HVAC_MODES]) == [ HVACMode.COOL, @@ -591,3 +607,40 @@ async def test_entity_and_device_attributes(hass: HomeAssistant, thermostat) -> assert entry.manufacturer == "Generic manufacturer" assert entry.hw_version == "v4.56" assert entry.sw_version == "v7.89" + + +async def test_set_windfree_off(hass: HomeAssistant, air_conditioner) -> None: + """Test if the windfree preset can be turned on and is turned off when fan mode is set.""" + entity_ids = ["climate.air_conditioner"] + air_conditioner.status.update_attribute_value(Attribute.switch, "on") + await setup_platform(hass, CLIMATE_DOMAIN, devices=[air_conditioner]) + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: entity_ids, ATTR_PRESET_MODE: "windFree"}, + blocking=True, + ) + state = hass.states.get("climate.air_conditioner") + assert state.attributes[ATTR_PRESET_MODE] == "windFree" + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + {ATTR_ENTITY_ID: entity_ids, ATTR_FAN_MODE: "low"}, + blocking=True, + ) + state = hass.states.get("climate.air_conditioner") 
+ assert not state.attributes[ATTR_PRESET_MODE] + + +async def test_set_swing_mode(hass: HomeAssistant, air_conditioner) -> None: + """Test the fan swing is set successfully.""" + await setup_platform(hass, CLIMATE_DOMAIN, devices=[air_conditioner]) + entity_ids = ["climate.air_conditioner"] + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_SWING_MODE, + {ATTR_ENTITY_ID: entity_ids, ATTR_SWING_MODE: "vertical"}, + blocking=True, + ) + state = hass.states.get("climate.air_conditioner") + assert state.attributes[ATTR_SWING_MODE] == "vertical" diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index ade151ed128..fa9d76c68ba 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -195,6 +195,418 @@ ]), }) # --- +# name: test_forecast_service[forecast] + dict({ + 'weather.smhi_test': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-07T12:00:00', + 'humidity': 96, + 'precipitation': 0.0, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, + 'wind_bearing': 114, + 'wind_gust_speed': 32.76, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-08T12:00:00', + 'humidity': 97, + 'precipitation': 10.6, + 'pressure': 984.0, + 'temperature': 15.0, + 'templow': 11.0, + 'wind_bearing': 183, + 'wind_gust_speed': 27.36, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-09T12:00:00', + 'humidity': 95, + 'precipitation': 6.3, + 'pressure': 1001.0, + 'temperature': 12.0, + 'templow': 11.0, + 'wind_bearing': 166, + 'wind_gust_speed': 48.24, + 'wind_speed': 18.0, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-10T12:00:00', + 'humidity': 75, + 'precipitation': 4.8, + 'pressure': 1011.0, + 'temperature': 14.0, + 
'templow': 10.0, + 'wind_bearing': 174, + 'wind_gust_speed': 29.16, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-11T12:00:00', + 'humidity': 69, + 'precipitation': 0.6, + 'pressure': 1015.0, + 'temperature': 18.0, + 'templow': 12.0, + 'wind_bearing': 197, + 'wind_gust_speed': 27.36, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-12T12:00:00', + 'humidity': 82, + 'precipitation': 0.0, + 'pressure': 1014.0, + 'temperature': 17.0, + 'templow': 12.0, + 'wind_bearing': 225, + 'wind_gust_speed': 28.08, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-13T12:00:00', + 'humidity': 59, + 'precipitation': 0.0, + 'pressure': 1013.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 234, + 'wind_gust_speed': 35.64, + 'wind_speed': 14.76, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'partlycloudy', + 'datetime': '2023-08-14T12:00:00', + 'humidity': 56, + 'precipitation': 0.0, + 'pressure': 1015.0, + 'temperature': 21.0, + 'templow': 14.0, + 'wind_bearing': 216, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 88, + 'condition': 'partlycloudy', + 'datetime': '2023-08-15T12:00:00', + 'humidity': 64, + 'precipitation': 3.6, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 226, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-16T12:00:00', + 'humidity': 61, + 'precipitation': 2.4, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 233, + 'wind_gust_speed': 33.48, + 'wind_speed': 14.04, + }), + ]), + }), + }) +# --- +# name: test_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': 
'2023-08-07T12:00:00', + 'humidity': 96, + 'precipitation': 0.0, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, + 'wind_bearing': 114, + 'wind_gust_speed': 32.76, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-08T12:00:00', + 'humidity': 97, + 'precipitation': 10.6, + 'pressure': 984.0, + 'temperature': 15.0, + 'templow': 11.0, + 'wind_bearing': 183, + 'wind_gust_speed': 27.36, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-09T12:00:00', + 'humidity': 95, + 'precipitation': 6.3, + 'pressure': 1001.0, + 'temperature': 12.0, + 'templow': 11.0, + 'wind_bearing': 166, + 'wind_gust_speed': 48.24, + 'wind_speed': 18.0, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-10T12:00:00', + 'humidity': 75, + 'precipitation': 4.8, + 'pressure': 1011.0, + 'temperature': 14.0, + 'templow': 10.0, + 'wind_bearing': 174, + 'wind_gust_speed': 29.16, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-11T12:00:00', + 'humidity': 69, + 'precipitation': 0.6, + 'pressure': 1015.0, + 'temperature': 18.0, + 'templow': 12.0, + 'wind_bearing': 197, + 'wind_gust_speed': 27.36, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-12T12:00:00', + 'humidity': 82, + 'precipitation': 0.0, + 'pressure': 1014.0, + 'temperature': 17.0, + 'templow': 12.0, + 'wind_bearing': 225, + 'wind_gust_speed': 28.08, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-13T12:00:00', + 'humidity': 59, + 'precipitation': 0.0, + 'pressure': 1013.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 234, + 'wind_gust_speed': 35.64, + 'wind_speed': 14.76, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'partlycloudy', + 'datetime': 
'2023-08-14T12:00:00', + 'humidity': 56, + 'precipitation': 0.0, + 'pressure': 1015.0, + 'temperature': 21.0, + 'templow': 14.0, + 'wind_bearing': 216, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 88, + 'condition': 'partlycloudy', + 'datetime': '2023-08-15T12:00:00', + 'humidity': 64, + 'precipitation': 3.6, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 226, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-16T12:00:00', + 'humidity': 61, + 'precipitation': 2.4, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 233, + 'wind_gust_speed': 33.48, + 'wind_speed': 14.04, + }), + ]), + }) +# --- +# name: test_forecast_service[get_forecasts] + dict({ + 'weather.smhi_test': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-07T12:00:00', + 'humidity': 96, + 'precipitation': 0.0, + 'pressure': 991.0, + 'temperature': 18.0, + 'templow': 15.0, + 'wind_bearing': 114, + 'wind_gust_speed': 32.76, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-08T12:00:00', + 'humidity': 97, + 'precipitation': 10.6, + 'pressure': 984.0, + 'temperature': 15.0, + 'templow': 11.0, + 'wind_bearing': 183, + 'wind_gust_speed': 27.36, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2023-08-09T12:00:00', + 'humidity': 95, + 'precipitation': 6.3, + 'pressure': 1001.0, + 'temperature': 12.0, + 'templow': 11.0, + 'wind_bearing': 166, + 'wind_gust_speed': 48.24, + 'wind_speed': 18.0, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-10T12:00:00', + 'humidity': 75, + 'precipitation': 4.8, + 'pressure': 1011.0, + 'temperature': 14.0, + 'templow': 10.0, + 'wind_bearing': 174, + 'wind_gust_speed': 
29.16, + 'wind_speed': 11.16, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-11T12:00:00', + 'humidity': 69, + 'precipitation': 0.6, + 'pressure': 1015.0, + 'temperature': 18.0, + 'templow': 12.0, + 'wind_bearing': 197, + 'wind_gust_speed': 27.36, + 'wind_speed': 10.08, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2023-08-12T12:00:00', + 'humidity': 82, + 'precipitation': 0.0, + 'pressure': 1014.0, + 'temperature': 17.0, + 'templow': 12.0, + 'wind_bearing': 225, + 'wind_gust_speed': 28.08, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-13T12:00:00', + 'humidity': 59, + 'precipitation': 0.0, + 'pressure': 1013.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 234, + 'wind_gust_speed': 35.64, + 'wind_speed': 14.76, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'partlycloudy', + 'datetime': '2023-08-14T12:00:00', + 'humidity': 56, + 'precipitation': 0.0, + 'pressure': 1015.0, + 'temperature': 21.0, + 'templow': 14.0, + 'wind_bearing': 216, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 88, + 'condition': 'partlycloudy', + 'datetime': '2023-08-15T12:00:00', + 'humidity': 64, + 'precipitation': 3.6, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 226, + 'wind_gust_speed': 33.12, + 'wind_speed': 13.68, + }), + dict({ + 'cloud_coverage': 75, + 'condition': 'partlycloudy', + 'datetime': '2023-08-16T12:00:00', + 'humidity': 61, + 'precipitation': 2.4, + 'pressure': 1014.0, + 'temperature': 20.0, + 'templow': 14.0, + 'wind_bearing': 233, + 'wind_gust_speed': 33.48, + 'wind_speed': 14.04, + }), + ]), + }), + }) +# --- # name: test_forecast_services dict({ 'cloud_coverage': 100, diff --git a/tests/components/smhi/test_weather.py b/tests/components/smhi/test_weather.py index 67aa18ea75d..f12aa92df3c 100644 --- 
a/tests/components/smhi/test_weather.py +++ b/tests/components/smhi/test_weather.py @@ -20,7 +20,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_SPEED, ATTR_WEATHER_WIND_SPEED_UNIT, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.components.weather.const import ( ATTR_WEATHER_CLOUD_COVERAGE, @@ -443,11 +444,19 @@ async def test_forecast_services_lack_of_data( assert forecast1 is None +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_forecast_service( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, api_response: str, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test forecast service.""" uri = APIURL_TEMPLATE.format( @@ -463,7 +472,7 @@ async def test_forecast_service( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": ENTITY_ID, "type": "daily"}, blocking=True, return_response=True, diff --git a/tests/components/smtp/test_notify.py b/tests/components/smtp/test_notify.py index 86a21c754ed..bca5a5674df 100644 --- a/tests/components/smtp/test_notify.py +++ b/tests/components/smtp/test_notify.py @@ -101,7 +101,7 @@ EMAIL_DATA = [ ( "Test msg", {"images": ["tests/testing_config/notify/test.jpg"]}, - "Content-Type: multipart/related", + "Content-Type: multipart/mixed", ), ( "Test msg", diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index cb912af1cf6..648ca12803c 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -230,9 +230,9 @@ async def silent_ssdp_scanner(hass): ), patch("homeassistant.components.ssdp.Scanner._async_stop_ssdp_listeners"), patch( "homeassistant.components.ssdp.Scanner.async_scan" ), patch( - "homeassistant.components.ssdp.Server._async_start_upnp_servers" + "homeassistant.components.ssdp.Server._async_start_upnp_servers", 
), patch( - "homeassistant.components.ssdp.Server._async_stop_upnp_servers" + "homeassistant.components.ssdp.Server._async_stop_upnp_servers", ): yield diff --git a/tests/components/subaru/conftest.py b/tests/components/subaru/conftest.py index 678e8ba5034..8bed67cb15f 100644 --- a/tests/components/subaru/conftest.py +++ b/tests/components/subaru/conftest.py @@ -145,9 +145,7 @@ async def setup_subaru_config_entry( return_value=vehicle_status, ), patch( MOCK_API_UPDATE, - ), patch( - MOCK_API_FETCH, side_effect=fetch_effect - ): + ), patch(MOCK_API_FETCH, side_effect=fetch_effect): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/switchbee/test_config_flow.py b/tests/components/switchbee/test_config_flow.py index 239777a4da3..98d413c3b96 100644 --- a/tests/components/switchbee/test_config_flow.py +++ b/tests/components/switchbee/test_config_flow.py @@ -39,9 +39,7 @@ async def test_form(hass: HomeAssistant, test_cucode_in_coordinator_data) -> Non return_value=True, ), patch( "switchbee.api.polling.CentralUnitPolling.fetch_states", return_value=None - ), patch( - "switchbee.api.polling.CentralUnitPolling._login", return_value=None - ): + ), patch("switchbee.api.polling.CentralUnitPolling._login", return_value=None): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { diff --git a/tests/components/system_bridge/test_config_flow.py b/tests/components/system_bridge/test_config_flow.py index 39ecc95d89e..ff517b8963d 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -152,7 +152,7 @@ async def test_user_flow(hass: HomeAssistant) -> None: "systembridgeconnector.websocket_client.WebSocketClient.get_data", return_value=FIXTURE_DATA_RESPONSE, ), patch( - "systembridgeconnector.websocket_client.WebSocketClient.listen" + "systembridgeconnector.websocket_client.WebSocketClient.listen", ), patch( 
"homeassistant.components.system_bridge.async_setup_entry", return_value=True, @@ -450,7 +450,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "systembridgeconnector.websocket_client.WebSocketClient.get_data", return_value=FIXTURE_DATA_RESPONSE, ), patch( - "systembridgeconnector.websocket_client.WebSocketClient.listen" + "systembridgeconnector.websocket_client.WebSocketClient.listen", ), patch( "homeassistant.components.system_bridge.async_setup_entry", return_value=True, @@ -484,7 +484,7 @@ async def test_zeroconf_flow(hass: HomeAssistant) -> None: "systembridgeconnector.websocket_client.WebSocketClient.get_data", return_value=FIXTURE_DATA_RESPONSE, ), patch( - "systembridgeconnector.websocket_client.WebSocketClient.listen" + "systembridgeconnector.websocket_client.WebSocketClient.listen", ), patch( "homeassistant.components.system_bridge.async_setup_entry", return_value=True, diff --git a/tests/components/tag/test_event.py b/tests/components/tag/test_event.py new file mode 100644 index 00000000000..7112a0cda4f --- /dev/null +++ b/tests/components/tag/test_event.py @@ -0,0 +1,106 @@ +"""Tests for the tag component.""" +from unittest.mock import patch + +import pytest + +from homeassistant.components.tag import DOMAIN, EVENT_TAG_SCANNED, async_scan_tag +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +from tests.common import async_capture_events +from tests.typing import WebSocketGenerator + +TEST_TAG_ID = "test tag id" +TEST_TAG_NAME = "test tag name" +TEST_DEVICE_ID = "device id" + + +@pytest.fixture +def storage_setup_named_tag( + hass, + hass_storage, +): + """Storage setup for test case of named tags.""" + + async def _storage(items=None): + if items is None: + hass_storage[DOMAIN] = { + "key": DOMAIN, + "version": 1, + "data": {"items": [{"id": TEST_TAG_ID, CONF_NAME: TEST_TAG_NAME}]}, + } + else: + 
hass_storage[DOMAIN] = items + config = {DOMAIN: {}} + return await async_setup_component(hass, DOMAIN, config) + + return _storage + + +async def test_named_tag_scanned_event( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, storage_setup_named_tag +) -> None: + """Test scanning named tag triggering event.""" + assert await storage_setup_named_tag() + + await hass_ws_client(hass) + + events = async_capture_events(hass, EVENT_TAG_SCANNED) + + now = dt_util.utcnow() + with patch("homeassistant.util.dt.utcnow", return_value=now): + await async_scan_tag(hass, TEST_TAG_ID, TEST_DEVICE_ID) + + assert len(events) == 1 + + event = events[0] + event_data = event.data + + assert event_data["name"] == TEST_TAG_NAME + assert event_data["device_id"] == TEST_DEVICE_ID + assert event_data["tag_id"] == TEST_TAG_ID + + +@pytest.fixture +def storage_setup_unnamed_tag(hass, hass_storage): + """Storage setup for test case of unnamed tags.""" + + async def _storage(items=None): + if items is None: + hass_storage[DOMAIN] = { + "key": DOMAIN, + "version": 1, + "data": {"items": [{"id": TEST_TAG_ID}]}, + } + else: + hass_storage[DOMAIN] = items + config = {DOMAIN: {}} + return await async_setup_component(hass, DOMAIN, config) + + return _storage + + +async def test_unnamed_tag_scanned_event( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, storage_setup_unnamed_tag +) -> None: + """Test scanning named tag triggering event.""" + assert await storage_setup_unnamed_tag() + + await hass_ws_client(hass) + + events = async_capture_events(hass, EVENT_TAG_SCANNED) + + now = dt_util.utcnow() + with patch("homeassistant.util.dt.utcnow", return_value=now): + await async_scan_tag(hass, TEST_TAG_ID, TEST_DEVICE_ID) + + assert len(events) == 1 + + event = events[0] + event_data = event.data + + assert event_data["name"] is None + assert event_data["device_id"] == TEST_DEVICE_ID + assert event_data["tag_id"] == TEST_TAG_ID diff --git a/tests/components/tag/test_init.py 
b/tests/components/tag/test_init.py index 3e034d2b9f2..5d54f31b13a 100644 --- a/tests/components/tag/test_init.py +++ b/tests/components/tag/test_init.py @@ -131,5 +131,5 @@ async def test_tag_id_exists( await client.send_json({"id": 2, "type": f"{DOMAIN}/create", "tag_id": "test tag"}) response = await client.receive_json() assert not response["success"] - assert response["error"]["code"] == "unknown_error" + assert response["error"]["code"] == "home_assistant_error" assert len(changes) == 0 diff --git a/tests/components/template/snapshots/test_weather.ambr b/tests/components/template/snapshots/test_weather.ambr index 72af2ab1637..0ee7f967176 100644 --- a/tests/components/template/snapshots/test_weather.ambr +++ b/tests/components/template/snapshots/test_weather.ambr @@ -1,4 +1,155 @@ # serializer version: 1 +# name: test_forecasts[config0-1-weather-forecast] + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].1 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].2 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-forecast].3 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 16.9, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + 
}), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecast].3 + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 16.9, + }), + ]), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecasts] + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecasts].1 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecasts].2 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'fog', + 'datetime': '2023-02-17T14:00:00+00:00', + 'is_daytime': True, + 'temperature': 14.2, + }), + ]), + }), + }) +# --- +# name: test_forecasts[config0-1-weather-get_forecasts].3 + dict({ + 'weather.forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-02-17T14:00:00+00:00', + 'temperature': 16.9, + }), + ]), + }), + }) +# --- # name: test_forecasts[config0-1-weather] dict({ 'forecast': list([ @@ -59,6 +210,138 @@ 'last_wind_speed': None, }) # --- +# name: test_trigger_weather_services[config0-1-template-forecast] + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': 
'2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-forecast].1 + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-forecast].2 + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecast].2 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecasts] + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecasts].1 + dict({ + 'weather.test': dict({ + 
'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- +# name: test_trigger_weather_services[config0-1-template-get_forecasts].2 + dict({ + 'weather.test': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2023-10-19T06:50:05-07:00', + 'is_daytime': True, + 'precipitation': 20.0, + 'temperature': 20.0, + 'templow': 15.0, + }), + ]), + }), + }) +# --- # name: test_trigger_weather_services[config0-1-template] dict({ 'forecast': list([ diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index dd4fa1d32a5..ef2390680b6 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -198,13 +198,13 @@ async def test_optimistic_states(hass: HomeAssistant, start_ha) -> None: "wibble": {"test_panel": "Invalid"}, } }, - "[wibble] is an invalid option", + "'wibble' is an invalid option", ), ( { "alarm_control_panel": {"platform": "template"}, }, - "required key not provided @ data['panels']", + "required key 'panels' not provided", ), ( { diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index f4cfe90b9f0..b95a68afd85 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -845,4 +845,4 @@ async def test_option_flow_sensor_preview_config_entry_removed( ) msg = await client.receive_json() assert not msg["success"] - assert msg["error"] == {"code": "unknown_error", "message": "Unknown error"} + assert msg["error"] == {"code": "home_assistant_error", "message": "Unknown error"} diff --git a/tests/components/template/test_cover.py b/tests/components/template/test_cover.py index fefad59aa08..35f03ee9508 100644 --- a/tests/components/template/test_cover.py +++ 
b/tests/components/template/test_cover.py @@ -424,7 +424,7 @@ async def test_template_open_or_position( ) -> None: """Test that at least one of open_cover or set_position is used.""" assert hass.states.async_all("cover") == [] - assert "Invalid config for [cover.template]" in caplog_setup_text + assert "Invalid config for 'cover.template'" in caplog_setup_text @pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py index f9b0bddddcf..ccdafebd8bb 100644 --- a/tests/components/template/test_fan.py +++ b/tests/components/template/test_fan.py @@ -12,6 +12,7 @@ from homeassistant.components.fan import ( DIRECTION_REVERSE, DOMAIN, FanEntityFeature, + NotValidPresetModeError, ) from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -489,7 +490,11 @@ async def test_preset_modes(hass: HomeAssistant, calls) -> None: ("smart", "smart", 3), ("invalid", "smart", 3), ]: - await common.async_set_preset_mode(hass, _TEST_FAN, extra) + if extra != state: + with pytest.raises(NotValidPresetModeError): + await common.async_set_preset_mode(hass, _TEST_FAN, extra) + else: + await common.async_set_preset_mode(hass, _TEST_FAN, extra) assert hass.states.get(_PRESET_MODE_INPUT_SELECT).state == state assert len(calls) == expected_calls assert calls[-1].data["action"] == "set_preset_mode" @@ -550,6 +555,7 @@ async def test_no_value_template(hass: HomeAssistant, calls) -> None: with assert_setup_component(1, "fan"): test_fan_config = { "preset_mode_template": "{{ states('input_select.preset_mode') }}", + "preset_modes": ["auto"], "percentage_template": "{{ states('input_number.percentage') }}", "oscillating_template": "{{ states('input_select.osc') }}", "direction_template": "{{ states('input_select.direction') }}", @@ -625,18 +631,18 @@ async def test_no_value_template(hass: HomeAssistant, calls) -> None: await 
hass.async_block_till_done() await common.async_turn_on(hass, _TEST_FAN) - _verify(hass, STATE_ON, 0, None, None, None) + _verify(hass, STATE_ON, 0, None, None, "auto") await common.async_turn_off(hass, _TEST_FAN) - _verify(hass, STATE_OFF, 0, None, None, None) + _verify(hass, STATE_OFF, 0, None, None, "auto") percent = 100 await common.async_set_percentage(hass, _TEST_FAN, percent) assert int(float(hass.states.get(_PERCENTAGE_INPUT_NUMBER).state)) == percent - _verify(hass, STATE_ON, percent, None, None, None) + _verify(hass, STATE_ON, percent, None, None, "auto") await common.async_turn_off(hass, _TEST_FAN) - _verify(hass, STATE_OFF, percent, None, None, None) + _verify(hass, STATE_OFF, percent, None, None, "auto") preset = "auto" await common.async_set_preset_mode(hass, _TEST_FAN, preset) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index f807b185c45..ec830d4daf6 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -7,6 +7,9 @@ from homeassistant.components.light import ( ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, ATTR_TRANSITION, ColorMode, LightEntityFeature, @@ -72,7 +75,7 @@ OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG = { } -OPTIMISTIC_HS_COLOR_LIGHT_CONFIG = { +OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG = { **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, "set_color": { "service": "test.automation", @@ -86,6 +89,68 @@ OPTIMISTIC_HS_COLOR_LIGHT_CONFIG = { } +OPTIMISTIC_HS_COLOR_LIGHT_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "set_hs": { + "service": "test.automation", + "data_template": { + "action": "set_hs", + "caller": "{{ this.entity_id }}", + "s": "{{s}}", + "h": "{{h}}", + }, + }, +} + + +OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "set_rgb": { + "service": "test.automation", + "data_template": { + "action": "set_rgb", + "caller": "{{ this.entity_id }}", + "r": "{{r}}", + "g": "{{g}}", + 
"b": "{{b}}", + }, + }, +} + + +OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "set_rgbw": { + "service": "test.automation", + "data_template": { + "action": "set_rgbw", + "caller": "{{ this.entity_id }}", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "w": "{{w}}", + }, + }, +} + + +OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "set_rgbww": { + "service": "test.automation", + "data_template": { + "action": "set_rgbww", + "caller": "{{ this.entity_id }}", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "cw": "{{cw}}", + "ww": "{{ww}}", + }, + }, +} + + async def async_setup_light(hass, count, light_config): """Do setup of light integration.""" config = {"light": {"platform": "template", "lights": light_config}} @@ -607,6 +672,7 @@ async def test_level_action_no_template( "{{ state_attr('light.nolight', 'brightness') }}", ColorMode.BRIGHTNESS, ), + (None, "{{'one'}}", ColorMode.BRIGHTNESS), ], ) async def test_level_template( @@ -643,6 +709,7 @@ async def test_level_template( (None, "None", ColorMode.COLOR_TEMP), (None, "{{ none }}", ColorMode.COLOR_TEMP), (None, "", ColorMode.COLOR_TEMP), + (None, "{{ 'one' }}", ColorMode.COLOR_TEMP), ], ) async def test_temperature_template( @@ -797,17 +864,17 @@ async def test_entity_picture_template(hass: HomeAssistant, setup_light) -> None [ { "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, + **OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG, "value_template": "{{1 == 1}}", } }, ], ) -async def test_color_action_no_template( - hass: HomeAssistant, +async def test_legacy_color_action_no_template( + hass, setup_light, calls, -) -> None: +): """Test setting color with optimistic template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") is None @@ -833,6 +900,186 @@ async def test_color_action_no_template( assert state.attributes["supported_features"] == 0 +@pytest.mark.parametrize("count", [1]) 
+@pytest.mark.parametrize( + "light_config", + [ + { + "test_template_light": { + **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + } + }, + ], +) +async def test_hs_color_action_no_template( + hass: HomeAssistant, + setup_light, + calls, +) -> None: + """Test setting hs color with optimistic template.""" + state = hass.states.get("light.test_template_light") + assert state.attributes.get("hs_color") is None + + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_HS_COLOR: (40, 50)}, + blocking=True, + ) + + assert len(calls) == 1 + assert calls[-1].data["action"] == "set_hs" + assert calls[-1].data["caller"] == "light.test_template_light" + assert calls[-1].data["h"] == 40 + assert calls[-1].data["s"] == 50 + + state = hass.states.get("light.test_template_light") + assert state.state == STATE_ON + assert state.attributes["color_mode"] == ColorMode.HS + assert state.attributes.get("hs_color") == (40, 50) + assert state.attributes["supported_color_modes"] == [ColorMode.HS] + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "light_config", + [ + { + "test_template_light": { + **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + } + }, + ], +) +async def test_rgb_color_action_no_template( + hass: HomeAssistant, + setup_light, + calls, +) -> None: + """Test setting rgb color with optimistic template.""" + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgb_color") is None + + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_RGB_COLOR: (160, 78, 192)}, + blocking=True, + ) + + assert len(calls) == 1 + assert calls[-1].data["action"] == "set_rgb" + assert calls[-1].data["caller"] == "light.test_template_light" + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + 
assert calls[-1].data["b"] == 192 + + state = hass.states.get("light.test_template_light") + assert state.state == STATE_ON + assert state.attributes["color_mode"] == ColorMode.RGB + assert state.attributes.get("rgb_color") == (160, 78, 192) + assert state.attributes["supported_color_modes"] == [ColorMode.RGB] + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "light_config", + [ + { + "test_template_light": { + **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + } + }, + ], +) +async def test_rgbw_color_action_no_template( + hass: HomeAssistant, + setup_light, + calls, +) -> None: + """Test setting rgbw color with optimistic template.""" + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgbw_color") is None + + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.test_template_light", + ATTR_RGBW_COLOR: (160, 78, 192, 25), + }, + blocking=True, + ) + + assert len(calls) == 1 + assert calls[-1].data["action"] == "set_rgbw" + assert calls[-1].data["caller"] == "light.test_template_light" + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + assert calls[-1].data["b"] == 192 + assert calls[-1].data["w"] == 25 + + state = hass.states.get("light.test_template_light") + assert state.state == STATE_ON + assert state.attributes["color_mode"] == ColorMode.RGBW + assert state.attributes.get("rgbw_color") == (160, 78, 192, 25) + assert state.attributes["supported_color_modes"] == [ColorMode.RGBW] + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "light_config", + [ + { + "test_template_light": { + **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + } + }, + ], +) +async def test_rgbww_color_action_no_template( + hass: HomeAssistant, + setup_light, + calls, +) -> None: + """Test setting 
rgbww color with optimistic template.""" + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgbww_color") is None + + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.test_template_light", + ATTR_RGBWW_COLOR: (160, 78, 192, 25, 55), + }, + blocking=True, + ) + + assert len(calls) == 1 + assert calls[-1].data["action"] == "set_rgbww" + assert calls[-1].data["caller"] == "light.test_template_light" + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + assert calls[-1].data["b"] == 192 + assert calls[-1].data["cw"] == 25 + assert calls[-1].data["ww"] == 55 + + state = hass.states.get("light.test_template_light") + assert state.state == STATE_ON + assert state.attributes["color_mode"] == ColorMode.RGBWW + assert state.attributes.get("rgbww_color") == (160, 78, 192, 25, 55) + assert state.attributes["supported_color_modes"] == [ColorMode.RGBWW] + assert state.attributes["supported_features"] == 0 + + @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( ("expected_hs", "color_template", "expected_color_mode"), @@ -845,19 +1092,20 @@ async def test_color_action_no_template( (None, "{{x - 12}}", ColorMode.HS), (None, "", ColorMode.HS), (None, "{{ none }}", ColorMode.HS), + (None, "{{('one','two')}}", ColorMode.HS), ], ) -async def test_color_template( - hass: HomeAssistant, +async def test_legacy_color_template( + hass, expected_hs, expected_color_mode, count, color_template, -) -> None: +): """Test the template for the color.""" light_config = { "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, + **OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG, "value_template": "{{ 1 == 1 }}", "color_template": color_template, } @@ -871,6 +1119,176 @@ async def test_color_template( assert state.attributes["supported_features"] == 0 +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected_hs", "hs_template", "expected_color_mode"), + [ + ((360, 100), 
"{{(360, 100)}}", ColorMode.HS), + ((360, 100), "(360, 100)", ColorMode.HS), + ((359.9, 99.9), "{{(359.9, 99.9)}}", ColorMode.HS), + (None, "{{(361, 100)}}", ColorMode.HS), + (None, "{{(360, 101)}}", ColorMode.HS), + (None, "[{{(360)}},{{null}}]", ColorMode.HS), + (None, "{{x - 12}}", ColorMode.HS), + (None, "", ColorMode.HS), + (None, "{{ none }}", ColorMode.HS), + (None, "{{('one','two')}}", ColorMode.HS), + ], +) +async def test_hs_template( + hass: HomeAssistant, + expected_hs, + expected_color_mode, + count, + hs_template, +) -> None: + """Test the template for the color.""" + light_config = { + "test_template_light": { + **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + "hs_template": hs_template, + } + } + await async_setup_light(hass, count, light_config) + state = hass.states.get("light.test_template_light") + assert state.attributes.get("hs_color") == expected_hs + assert state.state == STATE_ON + assert state.attributes["color_mode"] == expected_color_mode + assert state.attributes["supported_color_modes"] == [ColorMode.HS] + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected_rgb", "rgb_template", "expected_color_mode"), + [ + ((160, 78, 192), "{{(160, 78, 192)}}", ColorMode.RGB), + ((160, 78, 192), "{{[160, 78, 192]}}", ColorMode.RGB), + ((160, 78, 192), "(160, 78, 192)", ColorMode.RGB), + ((159, 77, 191), "{{(159.9, 77.9, 191.9)}}", ColorMode.RGB), + (None, "{{(256, 100, 100)}}", ColorMode.RGB), + (None, "{{(100, 256, 100)}}", ColorMode.RGB), + (None, "{{(100, 100, 256)}}", ColorMode.RGB), + (None, "{{x - 12}}", ColorMode.RGB), + (None, "", ColorMode.RGB), + (None, "{{ none }}", ColorMode.RGB), + (None, "{{('one','two','tree')}}", ColorMode.RGB), + ], +) +async def test_rgb_template( + hass: HomeAssistant, + expected_rgb, + expected_color_mode, + count, + rgb_template, +) -> None: + """Test the template for the color.""" + light_config = { + 
"test_template_light": { + **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + "rgb_template": rgb_template, + } + } + await async_setup_light(hass, count, light_config) + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgb_color") == expected_rgb + assert state.state == STATE_ON + assert state.attributes["color_mode"] == expected_color_mode + assert state.attributes["supported_color_modes"] == [ColorMode.RGB] + assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected_rgbw", "rgbw_template", "expected_color_mode"), + [ + ((160, 78, 192, 25), "{{(160, 78, 192, 25)}}", ColorMode.RGBW), + ((160, 78, 192, 25), "{{[160, 78, 192, 25]}}", ColorMode.RGBW), + ((160, 78, 192, 25), "(160, 78, 192, 25)", ColorMode.RGBW), + ((159, 77, 191, 24), "{{(159.9, 77.9, 191.9, 24.9)}}", ColorMode.RGBW), + (None, "{{(256, 100, 100, 100)}}", ColorMode.RGBW), + (None, "{{(100, 256, 100, 100)}}", ColorMode.RGBW), + (None, "{{(100, 100, 256, 100)}}", ColorMode.RGBW), + (None, "{{(100, 100, 100, 256)}}", ColorMode.RGBW), + (None, "{{x - 12}}", ColorMode.RGBW), + (None, "", ColorMode.RGBW), + (None, "{{ none }}", ColorMode.RGBW), + (None, "{{('one','two','tree','four')}}", ColorMode.RGBW), + ], +) +async def test_rgbw_template( + hass: HomeAssistant, + expected_rgbw, + expected_color_mode, + count, + rgbw_template, +) -> None: + """Test the template for the color.""" + light_config = { + "test_template_light": { + **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + "rgbw_template": rgbw_template, + } + } + await async_setup_light(hass, count, light_config) + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgbw_color") == expected_rgbw + assert state.state == STATE_ON + assert state.attributes["color_mode"] == expected_color_mode + assert state.attributes["supported_color_modes"] == [ColorMode.RGBW] + 
assert state.attributes["supported_features"] == 0 + + +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected_rgbww", "rgbww_template", "expected_color_mode"), + [ + ((160, 78, 192, 25, 55), "{{(160, 78, 192, 25, 55)}}", ColorMode.RGBWW), + ((160, 78, 192, 25, 55), "(160, 78, 192, 25, 55)", ColorMode.RGBWW), + ((160, 78, 192, 25, 55), "{{[160, 78, 192, 25, 55]}}", ColorMode.RGBWW), + ( + (159, 77, 191, 24, 54), + "{{(159.9, 77.9, 191.9, 24.9, 54.9)}}", + ColorMode.RGBWW, + ), + (None, "{{(256, 100, 100, 100, 100)}}", ColorMode.RGBWW), + (None, "{{(100, 256, 100, 100, 100)}}", ColorMode.RGBWW), + (None, "{{(100, 100, 256, 100, 100)}}", ColorMode.RGBWW), + (None, "{{(100, 100, 100, 256, 100)}}", ColorMode.RGBWW), + (None, "{{(100, 100, 100, 100, 256)}}", ColorMode.RGBWW), + (None, "{{x - 12}}", ColorMode.RGBWW), + (None, "", ColorMode.RGBWW), + (None, "{{ none }}", ColorMode.RGBWW), + (None, "{{('one','two','tree','four','five')}}", ColorMode.RGBWW), + ], +) +async def test_rgbww_template( + hass: HomeAssistant, + expected_rgbww, + expected_color_mode, + count, + rgbww_template, +) -> None: + """Test the template for the color.""" + light_config = { + "test_template_light": { + **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + "rgbww_template": rgbww_template, + } + } + await async_setup_light(hass, count, light_config) + state = hass.states.get("light.test_template_light") + assert state.attributes.get("rgbww_color") == expected_rgbww + assert state.state == STATE_ON + assert state.attributes["color_mode"] == expected_color_mode + assert state.attributes["supported_color_modes"] == [ColorMode.RGBWW] + assert state.attributes["supported_features"] == 0 + + @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( "light_config", @@ -879,16 +1297,14 @@ async def test_color_template( "test_template_light": { **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, "value_template": "{{1 == 1}}", - "set_color": [ - { - "service": 
"test.automation", - "data_template": { - "entity_id": "test.test_state", - "h": "{{h}}", - "s": "{{s}}", - }, + "set_hs": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "h": "{{h}}", + "s": "{{s}}", }, - ], + }, "set_temperature": { "service": "test.automation", "data_template": { @@ -896,18 +1312,48 @@ async def test_color_template( "color_temp": "{{color_temp}}", }, }, + "set_rgb": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + }, + }, + "set_rgbw": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "w": "{{w}}", + }, + }, + "set_rgbww": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "cw": "{{cw}}", + "ww": "{{ww}}", + }, + }, } }, ], ) -async def test_color_and_temperature_actions_no_template( +async def test_all_colors_mode_no_template( hass: HomeAssistant, setup_light, calls ) -> None: """Test setting color and color temperature with optimistic template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") is None - # Optimistically set color, light should be in hs_color mode + # Optimistically set hs color, light should be in hs_color mode await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, @@ -926,6 +1372,9 @@ async def test_color_and_temperature_actions_no_template( assert state.attributes["supported_color_modes"] == [ ColorMode.COLOR_TEMP, ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, ] assert state.attributes["supported_features"] == 0 @@ -947,10 +1396,100 @@ async def test_color_and_temperature_actions_no_template( assert state.attributes["supported_color_modes"] == [ ColorMode.COLOR_TEMP, ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, ] assert 
state.attributes["supported_features"] == 0 - # Optimistically set color, light should again be in hs_color mode + # Optimistically set rgb color, light should be in rgb_color mode + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_RGB_COLOR: (160, 78, 192)}, + blocking=True, + ) + + assert len(calls) == 3 + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + assert calls[-1].data["b"] == 192 + + state = hass.states.get("light.test_template_light") + assert state.attributes["color_mode"] == ColorMode.RGB + assert state.attributes["color_temp"] is None + assert state.attributes["rgb_color"] == (160, 78, 192) + assert state.attributes["supported_color_modes"] == [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ] + assert state.attributes["supported_features"] == 0 + + # Optimistically set rgbw color, light should be in rgb_color mode + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.test_template_light", + ATTR_RGBW_COLOR: (160, 78, 192, 25), + }, + blocking=True, + ) + + assert len(calls) == 4 + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + assert calls[-1].data["b"] == 192 + assert calls[-1].data["w"] == 25 + + state = hass.states.get("light.test_template_light") + assert state.attributes["color_mode"] == ColorMode.RGBW + assert state.attributes["color_temp"] is None + assert state.attributes["rgbw_color"] == (160, 78, 192, 25) + assert state.attributes["supported_color_modes"] == [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ] + assert state.attributes["supported_features"] == 0 + + # Optimistically set rgbww color, light should be in rgb_color mode + await hass.services.async_call( + light.DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.test_template_light", + ATTR_RGBWW_COLOR: (160, 78, 192, 25, 
55), + }, + blocking=True, + ) + + assert len(calls) == 5 + assert calls[-1].data["r"] == 160 + assert calls[-1].data["g"] == 78 + assert calls[-1].data["b"] == 192 + assert calls[-1].data["cw"] == 25 + assert calls[-1].data["ww"] == 55 + + state = hass.states.get("light.test_template_light") + assert state.attributes["color_mode"] == ColorMode.RGBWW + assert state.attributes["color_temp"] is None + assert state.attributes["rgbww_color"] == (160, 78, 192, 25, 55) + assert state.attributes["supported_color_modes"] == [ + ColorMode.COLOR_TEMP, + ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, + ] + assert state.attributes["supported_features"] == 0 + + # Optimistically set hs color, light should again be in hs_color mode await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, @@ -958,7 +1497,7 @@ async def test_color_and_temperature_actions_no_template( blocking=True, ) - assert len(calls) == 3 + assert len(calls) == 6 assert calls[-1].data["h"] == 10 assert calls[-1].data["s"] == 20 @@ -969,6 +1508,9 @@ async def test_color_and_temperature_actions_no_template( assert state.attributes["supported_color_modes"] == [ ColorMode.COLOR_TEMP, ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, ] assert state.attributes["supported_features"] == 0 @@ -980,7 +1522,7 @@ async def test_color_and_temperature_actions_no_template( blocking=True, ) - assert len(calls) == 4 + assert len(calls) == 7 assert calls[-1].data["color_temp"] == 234 state = hass.states.get("light.test_template_light") @@ -990,6 +1532,9 @@ async def test_color_and_temperature_actions_no_template( assert state.attributes["supported_color_modes"] == [ ColorMode.COLOR_TEMP, ColorMode.HS, + ColorMode.RGB, + ColorMode.RGBW, + ColorMode.RGBWW, ] assert state.attributes["supported_features"] == 0 diff --git a/tests/components/template/test_weather.py b/tests/components/template/test_weather.py index 524f9c41aeb..36071c746da 100644 --- 
a/tests/components/template/test_weather.py +++ b/tests/components/template/test_weather.py @@ -18,7 +18,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, Forecast, ) from homeassistant.const import ATTR_ATTRIBUTION, STATE_UNAVAILABLE, STATE_UNKNOWN @@ -92,6 +93,13 @@ async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: assert state.attributes.get(v_attr) == value +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, WEATHER_DOMAIN)]) @pytest.mark.parametrize( "config", @@ -114,7 +122,7 @@ async def test_template_state_text(hass: HomeAssistant, start_ha) -> None: ], ) async def test_forecasts( - hass: HomeAssistant, start_ha, snapshot: SnapshotAssertion + hass: HomeAssistant, start_ha, snapshot: SnapshotAssertion, service: str ) -> None: """Test forecast service.""" for attr, _v_attr, value in [ @@ -161,7 +169,7 @@ async def test_forecasts( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "daily"}, blocking=True, return_response=True, @@ -169,7 +177,7 @@ async def test_forecasts( assert response == snapshot response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "hourly"}, blocking=True, return_response=True, @@ -177,7 +185,7 @@ async def test_forecasts( assert response == snapshot response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "twice_daily"}, blocking=True, return_response=True, @@ -204,7 +212,7 @@ async def test_forecasts( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, 
{"entity_id": "weather.forecast", "type": "daily"}, blocking=True, return_response=True, @@ -212,6 +220,13 @@ async def test_forecasts( assert response == snapshot +@pytest.mark.parametrize( + ("service", "expected"), + [ + (SERVICE_GET_FORECASTS, {"weather.forecast": {"forecast": []}}), + (LEGACY_SERVICE_GET_FORECAST, {"forecast": []}), + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, WEATHER_DOMAIN)]) @pytest.mark.parametrize( "config", @@ -236,6 +251,8 @@ async def test_forecast_invalid( hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, + service: str, + expected: dict[str, Any], ) -> None: """Test invalid forecasts.""" for attr, _v_attr, value in [ @@ -271,23 +288,30 @@ async def test_forecast_invalid( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "daily"}, blocking=True, return_response=True, ) - assert response == {"forecast": []} + assert response == expected response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "hourly"}, blocking=True, return_response=True, ) - assert response == {"forecast": []} + assert response == expected assert "Only valid keys in Forecast are allowed" in caplog.text +@pytest.mark.parametrize( + ("service", "expected"), + [ + (SERVICE_GET_FORECASTS, {"weather.forecast": {"forecast": []}}), + (LEGACY_SERVICE_GET_FORECAST, {"forecast": []}), + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, WEATHER_DOMAIN)]) @pytest.mark.parametrize( "config", @@ -311,6 +335,8 @@ async def test_forecast_invalid_is_daytime_missing_in_twice_daily( hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, + service: str, + expected: dict[str, Any], ) -> None: """Test forecast service invalid when is_daytime missing in twice_daily forecast.""" for attr, _v_attr, value in [ @@ -340,15 +366,22 @@ async def 
test_forecast_invalid_is_daytime_missing_in_twice_daily( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "twice_daily"}, blocking=True, return_response=True, ) - assert response == {"forecast": []} + assert response == expected assert "`is_daytime` is missing in twice_daily forecast" in caplog.text +@pytest.mark.parametrize( + ("service", "expected"), + [ + (SERVICE_GET_FORECASTS, {"weather.forecast": {"forecast": []}}), + (LEGACY_SERVICE_GET_FORECAST, {"forecast": []}), + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, WEATHER_DOMAIN)]) @pytest.mark.parametrize( "config", @@ -372,6 +405,8 @@ async def test_forecast_invalid_datetime_missing( hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, + service: str, + expected: dict[str, Any], ) -> None: """Test forecast service invalid when datetime missing.""" for attr, _v_attr, value in [ @@ -401,15 +436,22 @@ async def test_forecast_invalid_datetime_missing( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "twice_daily"}, blocking=True, return_response=True, ) - assert response == {"forecast": []} + assert response == expected assert "`datetime` is required in forecasts" in caplog.text +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, WEATHER_DOMAIN)]) @pytest.mark.parametrize( "config", @@ -431,7 +473,7 @@ async def test_forecast_invalid_datetime_missing( ], ) async def test_forecast_format_error( - hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, start_ha, caplog: pytest.LogCaptureFixture, service: str ) -> None: """Test forecast service invalid on incorrect format.""" for attr, _v_attr, value in [ @@ -467,7 +509,7 @@ async def test_forecast_format_error( await 
hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "daily"}, blocking=True, return_response=True, @@ -475,7 +517,7 @@ async def test_forecast_format_error( assert "Forecasts is not a list, see Weather documentation" in caplog.text await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, {"entity_id": "weather.forecast", "type": "hourly"}, blocking=True, return_response=True, @@ -638,6 +680,13 @@ async def test_trigger_action( assert state.context is context +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @pytest.mark.parametrize( "config", @@ -694,6 +743,7 @@ async def test_trigger_weather_services( start_ha, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test trigger weather entity with services.""" state = hass.states.get("weather.test") @@ -756,7 +806,7 @@ async def test_trigger_weather_services( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": state.entity_id, "type": "daily", @@ -768,7 +818,7 @@ async def test_trigger_weather_services( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": state.entity_id, "type": "hourly", @@ -780,7 +830,7 @@ async def test_trigger_weather_services( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": state.entity_id, "type": "twice_daily", diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index 3e84049efa8..90b06858e00 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -1,8 +1,10 @@ """Tests for the todo integration.""" from collections.abc import Generator +import datetime from typing import Any from unittest.mock 
import AsyncMock +import zoneinfo import pytest import voluptuous as vol @@ -13,11 +15,13 @@ from homeassistant.components.todo import ( TodoItemStatus, TodoListEntity, TodoListEntityFeature, + intent as todo_intent, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent from homeassistant.helpers.entity_platform import AddEntitiesCallback from tests.common import ( @@ -31,12 +35,45 @@ from tests.common import ( from tests.typing import WebSocketGenerator TEST_DOMAIN = "test" +ITEM_1 = { + "uid": "1", + "summary": "Item #1", + "status": "needs_action", +} +ITEM_2 = { + "uid": "2", + "summary": "Item #2", + "status": "completed", +} +TEST_TIMEZONE = zoneinfo.ZoneInfo("America/Regina") +TEST_OFFSET = "-06:00" class MockFlow(ConfigFlow): """Test flow.""" +class MockTodoListEntity(TodoListEntity): + """Test todo list entity.""" + + def __init__(self, items: list[TodoItem] | None = None) -> None: + """Initialize entity.""" + self._attr_todo_items = items or [] + + @property + def items(self) -> list[TodoItem]: + """Return the items in the To-do list.""" + return self._attr_todo_items + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + self._attr_todo_items.append(item) + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item in the To-do list.""" + self._attr_todo_items = [item for item in self.items if item.uid not in uids] + + @pytest.fixture(autouse=True) def config_flow_fixture(hass: HomeAssistant) -> Generator[None, None, None]: """Mock config flow.""" @@ -75,6 +112,12 @@ def mock_setup_integration(hass: HomeAssistant) -> None: ) +@pytest.fixture(autouse=True) +def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the tests that keesp UTC-6 all 
year round.""" + hass.config.set_time_zone("America/Regina") + + async def create_mock_platform( hass: HomeAssistant, entities: list[TodoListEntity], @@ -106,7 +149,12 @@ async def create_mock_platform( @pytest.fixture(name="test_entity") def mock_test_entity() -> TodoListEntity: """Fixture that creates a test TodoList entity with mock service calls.""" - entity1 = TodoListEntity() + entity1 = MockTodoListEntity( + [ + TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), + TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), + ] + ) entity1.entity_id = "todo.entity1" entity1._attr_supported_features = ( TodoListEntityFeature.CREATE_TODO_ITEM @@ -114,13 +162,9 @@ def mock_test_entity() -> TodoListEntity: | TodoListEntityFeature.DELETE_TODO_ITEM | TodoListEntityFeature.MOVE_TODO_ITEM ) - entity1._attr_todo_items = [ - TodoItem(summary="Item #1", uid="1", status=TodoItemStatus.NEEDS_ACTION), - TodoItem(summary="Item #2", uid="2", status=TodoItemStatus.COMPLETED), - ] - entity1.async_create_todo_item = AsyncMock() + entity1.async_create_todo_item = AsyncMock(wraps=entity1.async_create_todo_item) entity1.async_update_todo_item = AsyncMock() - entity1.async_delete_todo_items = AsyncMock() + entity1.async_delete_todo_items = AsyncMock(wraps=entity1.async_delete_todo_items) entity1.async_move_todo_item = AsyncMock() return entity1 @@ -168,17 +212,68 @@ async def test_list_todo_items( assert resp.get("success") assert resp.get("result") == { "items": [ - {"summary": "Item #1", "uid": "1", "status": "needs_action"}, - {"summary": "Item #2", "uid": "2", "status": "completed"}, + ITEM_1, + ITEM_2, ] } +@pytest.mark.parametrize( + ("service_data", "expected_items"), + [ + ({}, [ITEM_1, ITEM_2]), + ( + [ + {"status": [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, + [ITEM_1, ITEM_2], + ] + ), + ( + [ + {"status": [TodoItemStatus.NEEDS_ACTION]}, + [ITEM_1], + ] + ), + ( + [ + {"status": [TodoItemStatus.COMPLETED]}, + [ITEM_2], + ] 
+ ), + ], +) +async def test_get_items_service( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + test_entity: TodoListEntity, + service_data: dict[str, Any], + expected_items: list[dict[str, Any]], +) -> None: + """Test listing items in a To-do list from a service call.""" + + await create_mock_platform(hass, [test_entity]) + + state = hass.states.get("todo.entity1") + assert state + assert state.state == "1" + assert state.attributes == {"supported_features": 15} + + result = await hass.services.async_call( + DOMAIN, + "get_items", + service_data, + target={"entity_id": "todo.entity1"}, + blocking=True, + return_response=True, + ) + assert result == {"todo.entity1": {"items": expected_items}} + + async def test_unsupported_websocket( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, ) -> None: - """Test a To-do list that does not support features.""" + """Test a To-do list for an entity that does not exist.""" entity1 = TodoListEntity() entity1.entity_id = "todo.entity1" @@ -242,23 +337,42 @@ async def test_add_item_service_raises( @pytest.mark.parametrize( - ("item_data", "expected_error"), + ("item_data", "expected_exception", "expected_error"), [ - ({}, "required key not provided"), - ({"item": ""}, "length of value must be at least 1"), + ({}, vol.Invalid, "required key not provided"), + ({"item": ""}, vol.Invalid, "length of value must be at least 1"), + ( + {"item": "Submit forms", "description": "Submit tax forms"}, + ValueError, + "does not support setting field 'description'", + ), + ( + {"item": "Submit forms", "due_date": "2023-11-17"}, + ValueError, + "does not support setting field 'due_date'", + ), + ( + { + "item": "Submit forms", + "due_datetime": f"2023-11-17T17:00:00{TEST_OFFSET}", + }, + ValueError, + "does not support setting field 'due_datetime'", + ), ], ) async def test_add_item_service_invalid_input( hass: HomeAssistant, test_entity: TodoListEntity, item_data: dict[str, Any], + expected_exception: str, expected_error: 
str, ) -> None: """Test invalid input to the add item service.""" await create_mock_platform(hass, [test_entity]) - with pytest.raises(vol.Invalid, match=expected_error): + with pytest.raises(expected_exception, match=expected_error): await hass.services.async_call( DOMAIN, "add_item", @@ -268,6 +382,82 @@ async def test_add_item_service_invalid_input( ) +@pytest.mark.parametrize( + ("supported_entity_feature", "item_data", "expected_item"), + ( + ( + TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, + {"item": "New item", "due_date": "2023-11-13"}, + TodoItem( + summary="New item", + status=TodoItemStatus.NEEDS_ACTION, + due=datetime.date(2023, 11, 13), + ), + ), + ( + TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, + {"item": "New item", "due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, + TodoItem( + summary="New item", + status=TodoItemStatus.NEEDS_ACTION, + due=datetime.datetime(2023, 11, 13, 17, 00, 00, tzinfo=TEST_TIMEZONE), + ), + ), + ( + TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, + {"item": "New item", "due_datetime": "2023-11-13T17:00:00+00:00"}, + TodoItem( + summary="New item", + status=TodoItemStatus.NEEDS_ACTION, + due=datetime.datetime(2023, 11, 13, 11, 00, 00, tzinfo=TEST_TIMEZONE), + ), + ), + ( + TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, + {"item": "New item", "due_datetime": "2023-11-13"}, + TodoItem( + summary="New item", + status=TodoItemStatus.NEEDS_ACTION, + due=datetime.datetime(2023, 11, 13, 0, 00, 00, tzinfo=TEST_TIMEZONE), + ), + ), + ( + TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, + {"item": "New item", "description": "Submit revised draft"}, + TodoItem( + summary="New item", + status=TodoItemStatus.NEEDS_ACTION, + description="Submit revised draft", + ), + ), + ), +) +async def test_add_item_service_extended_fields( + hass: HomeAssistant, + test_entity: TodoListEntity, + supported_entity_feature: int, + item_data: dict[str, Any], + expected_item: TodoItem, +) -> None: + """Test adding an item in a To-do list.""" + + 
test_entity._attr_supported_features |= supported_entity_feature + await create_mock_platform(hass, [test_entity]) + + await hass.services.async_call( + DOMAIN, + "add_item", + {"item": "New item", **item_data}, + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + + args = test_entity.async_create_todo_item.call_args + assert args + item = args.kwargs.get("item") + assert item == expected_item + + async def test_update_todo_item_service_by_id( hass: HomeAssistant, test_entity: TodoListEntity, @@ -470,6 +660,82 @@ async def test_update_item_service_invalid_input( ) +@pytest.mark.parametrize( + ("update_data"), + [ + ({"due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}), + ({"due_date": "2023-11-13"}), + ({"description": "Submit revised draft"}), + ], +) +async def test_update_todo_item_field_unsupported( + hass: HomeAssistant, + test_entity: TodoListEntity, + update_data: dict[str, Any], +) -> None: + """Test updating an item in a To-do list.""" + + await create_mock_platform(hass, [test_entity]) + + with pytest.raises(ValueError, match="does not support"): + await hass.services.async_call( + DOMAIN, + "update_item", + {"item": "1", **update_data}, + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("supported_entity_feature", "update_data", "expected_update"), + ( + ( + TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, + {"due_date": "2023-11-13"}, + TodoItem(uid="1", due=datetime.date(2023, 11, 13)), + ), + ( + TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, + {"due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, + TodoItem( + uid="1", + due=datetime.datetime(2023, 11, 13, 17, 0, 0, tzinfo=TEST_TIMEZONE), + ), + ), + ( + TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, + {"description": "Submit revised draft"}, + TodoItem(uid="1", description="Submit revised draft"), + ), + ), +) +async def test_update_todo_item_extended_fields( + hass: HomeAssistant, + test_entity: TodoListEntity, + supported_entity_feature: int, + 
update_data: dict[str, Any], + expected_update: TodoItem, +) -> None: + """Test updating an item in a To-do list.""" + + test_entity._attr_supported_features |= supported_entity_feature + await create_mock_platform(hass, [test_entity]) + + await hass.services.async_call( + DOMAIN, + "update_item", + {"item": "1", **update_data}, + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + + args = test_entity.async_update_todo_item.call_args + assert args + item = args.kwargs.get("item") + assert item == expected_update + + async def test_remove_todo_item_service_by_id( hass: HomeAssistant, test_entity: TodoListEntity, @@ -688,12 +954,16 @@ async def test_move_todo_item_service_invalid_input( "rename": "Updated item", }, ), + ( + "remove_completed_items", + None, + ), ], ) async def test_unsupported_service( hass: HomeAssistant, service_name: str, - payload: dict[str, Any], + payload: dict[str, Any] | None, ) -> None: """Test a To-do list that does not support features.""" @@ -737,3 +1007,294 @@ async def test_move_item_unsupported( resp = await client.receive_json() assert resp.get("id") == 1 assert resp.get("error", {}).get("code") == "not_supported" + + +async def test_add_item_intent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test adding items to lists using an intent.""" + await todo_intent.async_setup_intents(hass) + + entity1 = MockTodoListEntity() + entity1._attr_name = "List 1" + entity1.entity_id = "todo.list_1" + + entity2 = MockTodoListEntity() + entity2._attr_name = "List 2" + entity2.entity_id = "todo.list_2" + + await create_mock_platform(hass, [entity1, entity2]) + + # Add to first list + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "beer"}, "name": {"value": "list 1"}}, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 1 + assert len(entity2.items) == 0 + assert 
entity1.items[0].summary == "beer" + entity1.items.clear() + + # Add to second list + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "cheese"}, "name": {"value": "List 2"}}, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 0 + assert len(entity2.items) == 1 + assert entity2.items[0].summary == "cheese" + + # List name is case insensitive + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "wine"}, "name": {"value": "lIST 2"}}, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 0 + assert len(entity2.items) == 2 + assert entity2.items[1].summary == "wine" + + # Missing list + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "wine"}, "name": {"value": "This list does not exist"}}, + ) + + +async def test_remove_completed_items_service( + hass: HomeAssistant, + test_entity: TodoListEntity, +) -> None: + """Test remove completed todo items service.""" + await create_mock_platform(hass, [test_entity]) + + await hass.services.async_call( + DOMAIN, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + + args = test_entity.async_delete_todo_items.call_args + assert args + assert args.kwargs.get("uids") == ["2"] + + test_entity.async_delete_todo_items.reset_mock() + + # calling service multiple times will not call the entity method + await hass.services.async_call( + DOMAIN, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + test_entity.async_delete_todo_items.assert_not_called() + + +async def test_remove_completed_items_service_raises( + hass: HomeAssistant, + test_entity: TodoListEntity, +) -> None: + """Test removing all completed item from a To-do 
list that raises an error.""" + + await create_mock_platform(hass, [test_entity]) + + test_entity.async_delete_todo_items.side_effect = HomeAssistantError("Ooops") + with pytest.raises(HomeAssistantError, match="Ooops"): + await hass.services.async_call( + DOMAIN, + "remove_completed_items", + target={"entity_id": "todo.entity1"}, + blocking=True, + ) + + +async def test_subscribe( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + test_entity: TodoListEntity, +) -> None: + """Test subscribing to todo updates.""" + + await create_mock_platform(hass, [test_entity]) + + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": test_entity.entity_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + event_message = msg["event"] + assert event_message == { + "items": [ + { + "summary": "Item #1", + "uid": "1", + "status": "needs_action", + "due": None, + "description": None, + }, + { + "summary": "Item #2", + "uid": "2", + "status": "completed", + "due": None, + "description": None, + }, + ] + } + test_entity._attr_todo_items = [ + *test_entity._attr_todo_items, + TodoItem(summary="Item #3", uid="3", status=TodoItemStatus.NEEDS_ACTION), + ] + + test_entity.async_write_ha_state() + msg = await client.receive_json() + event_message = msg["event"] + assert event_message == { + "items": [ + { + "summary": "Item #1", + "uid": "1", + "status": "needs_action", + "due": None, + "description": None, + }, + { + "summary": "Item #2", + "uid": "2", + "status": "completed", + "due": None, + "description": None, + }, + { + "summary": "Item #3", + "uid": "3", + "status": "needs_action", + "due": None, + "description": None, + }, + ] + } + + test_entity._attr_todo_items = None + test_entity.async_write_ha_state() + msg = 
await client.receive_json() + event_message = msg["event"] + assert event_message == { + "items": [], + } + + +async def test_subscribe_entity_does_not_exist( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + test_entity: TodoListEntity, +) -> None: + """Test failure to subscribe to an entity that does not exist.""" + + await create_mock_platform(hass, [test_entity]) + + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": "todo.unknown", + } + ) + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"] == { + "code": "invalid_entity_id", + "message": "To-do list entity not found: todo.unknown", + } + + +@pytest.mark.parametrize( + ("item_data", "expected_item_data"), + [ + ({"due": datetime.date(2023, 11, 17)}, {"due": "2023-11-17"}), + ( + {"due": datetime.datetime(2023, 11, 17, 17, 0, 0, tzinfo=TEST_TIMEZONE)}, + {"due": f"2023-11-17T17:00:00{TEST_OFFSET}"}, + ), + ({"description": "Some description"}, {"description": "Some description"}), + ], +) +async def test_list_todo_items_extended_fields( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + test_entity: TodoListEntity, + item_data: dict[str, Any], + expected_item_data: dict[str, Any], +) -> None: + """Test listing items in a To-do list with extended fields.""" + + test_entity._attr_todo_items = [ + TodoItem( + **ITEM_1, + **item_data, + ), + ] + await create_mock_platform(hass, [test_entity]) + + client = await hass_ws_client(hass) + await client.send_json( + {"id": 1, "type": "todo/item/list", "entity_id": "todo.entity1"} + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") + assert resp.get("result") == { + "items": [ + { + **ITEM_1, + **expected_item_data, + }, + ] + } + + result = await hass.services.async_call( + DOMAIN, + "get_items", + {}, + target={"entity_id": "todo.entity1"}, + blocking=True, + return_response=True, + ) + assert 
result == { + "todo.entity1": { + "items": [ + { + **ITEM_1, + **expected_item_data, + }, + ] + } + } diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 28f22e1061a..4e4d41b6914 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -45,6 +45,7 @@ def make_api_task( is_completed: bool = False, due: Due | None = None, project_id: str | None = None, + description: str | None = None, ) -> Task: """Mock a todoist Task instance.""" return Task( @@ -55,8 +56,8 @@ def make_api_task( content=content or SUMMARY, created_at="2021-10-01T00:00:00", creator_id="1", - description="A task", - due=due or Due(is_recurring=False, date=TODAY, string="today"), + description=description, + due=due, id=id or "1", labels=["Label1"], order=1, diff --git a/tests/components/todoist/test_todo.py b/tests/components/todoist/test_todo.py index a14f362ea5b..aa00e2c2ff4 100644 --- a/tests/components/todoist/test_todo.py +++ b/tests/components/todoist/test_todo.py @@ -1,7 +1,9 @@ """Unit tests for the Todoist todo platform.""" +from typing import Any from unittest.mock import AsyncMock import pytest +from todoist_api_python.models import Due, Task from homeassistant.components.todo import DOMAIN as TODO_DOMAIN from homeassistant.const import Platform @@ -10,6 +12,8 @@ from homeassistant.helpers.entity_component import async_update_entity from .conftest import PROJECT_ID, make_api_task +from tests.typing import WebSocketGenerator + @pytest.fixture(autouse=True) def platforms() -> list[Platform]: @@ -17,6 +21,12 @@ def platforms() -> list[Platform]: return [Platform.TODO] +@pytest.fixture(autouse=True) +def set_time_zone(hass: HomeAssistant) -> None: + """Set the time zone for the tests that keesp UTC-6 all year round.""" + hass.config.set_time_zone("America/Regina") + + @pytest.mark.parametrize( ("tasks", "expected_state"), [ @@ -55,11 +65,91 @@ async def test_todo_item_state( assert state.state == expected_state 
-@pytest.mark.parametrize(("tasks"), [[]]) +@pytest.mark.parametrize( + ("tasks", "item_data", "tasks_after_update", "add_kwargs", "expected_item"), + [ + ( + [], + {}, + [make_api_task(id="task-id-1", content="Soda", is_completed=False)], + {"content": "Soda"}, + {"uid": "task-id-1", "summary": "Soda", "status": "needs_action"}, + ), + ( + [], + {"due_date": "2023-11-18"}, + [ + make_api_task( + id="task-id-1", + content="Soda", + is_completed=False, + due=Due(is_recurring=False, date="2023-11-18", string="today"), + ) + ], + {"due": {"date": "2023-11-18"}}, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "due": "2023-11-18", + }, + ), + ( + [], + {"due_datetime": "2023-11-18T06:30:00"}, + [ + make_api_task( + id="task-id-1", + content="Soda", + is_completed=False, + due=Due( + date="2023-11-18", + is_recurring=False, + datetime="2023-11-18T12:30:00.000000Z", + string="today", + ), + ) + ], + { + "due": {"date": "2023-11-18", "datetime": "2023-11-18T06:30:00-06:00"}, + }, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "due": "2023-11-18T06:30:00-06:00", + }, + ), + ( + [], + {"description": "6-pack"}, + [ + make_api_task( + id="task-id-1", + content="Soda", + description="6-pack", + is_completed=False, + ) + ], + {"description": "6-pack"}, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "description": "6-pack", + }, + ), + ], + ids=["summary", "due_date", "due_datetime", "description"], +) async def test_add_todo_list_item( hass: HomeAssistant, setup_integration: None, api: AsyncMock, + item_data: dict[str, Any], + tasks_after_update: list[Task], + add_kwargs: dict[str, Any], + expected_item: dict[str, Any], ) -> None: """Test for adding a To-do Item.""" @@ -69,28 +159,35 @@ async def test_add_todo_list_item( api.add_task = AsyncMock() # Fake API response when state is refreshed after create - api.get_tasks.return_value = [ - make_api_task(id="task-id-1", content="Soda", 
is_completed=False) - ] + api.get_tasks.return_value = tasks_after_update await hass.services.async_call( TODO_DOMAIN, "add_item", - {"item": "Soda"}, + {"item": "Soda", **item_data}, target={"entity_id": "todo.name"}, blocking=True, ) args = api.add_task.call_args assert args - assert args.kwargs.get("content") == "Soda" - assert args.kwargs.get("project_id") == PROJECT_ID + assert args.kwargs == {"project_id": PROJECT_ID, "content": "Soda", **add_kwargs} # Verify state is refreshed state = hass.states.get("todo.name") assert state assert state.state == "1" + result = await hass.services.async_call( + TODO_DOMAIN, + "get_items", + {}, + target={"entity_id": "todo.name"}, + blocking=True, + return_response=True, + ) + assert result == {"todo.name": {"items": [expected_item]}} + @pytest.mark.parametrize( ("tasks"), [[make_api_task(id="task-id-1", content="Soda", is_completed=False)]] @@ -156,12 +253,91 @@ async def test_update_todo_item_status( @pytest.mark.parametrize( - ("tasks"), [[make_api_task(id="task-id-1", content="Soda", is_completed=False)]] + ("tasks", "update_data", "tasks_after_update", "update_kwargs", "expected_item"), + [ + ( + [make_api_task(id="task-id-1", content="Soda", is_completed=False)], + {"rename": "Milk"}, + [make_api_task(id="task-id-1", content="Milk", is_completed=False)], + {"task_id": "task-id-1", "content": "Milk"}, + {"uid": "task-id-1", "summary": "Milk", "status": "needs_action"}, + ), + ( + [make_api_task(id="task-id-1", content="Soda", is_completed=False)], + {"due_date": "2023-11-18"}, + [ + make_api_task( + id="task-id-1", + content="Soda", + is_completed=False, + due=Due(is_recurring=False, date="2023-11-18", string="today"), + ) + ], + {"task_id": "task-id-1", "due": {"date": "2023-11-18"}}, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "due": "2023-11-18", + }, + ), + ( + [make_api_task(id="task-id-1", content="Soda", is_completed=False)], + {"due_datetime": "2023-11-18T06:30:00"}, + [ + 
make_api_task( + id="task-id-1", + content="Soda", + is_completed=False, + due=Due( + date="2023-11-18", + is_recurring=False, + datetime="2023-11-18T12:30:00.000000Z", + string="today", + ), + ) + ], + { + "task_id": "task-id-1", + "due": {"date": "2023-11-18", "datetime": "2023-11-18T06:30:00-06:00"}, + }, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "due": "2023-11-18T06:30:00-06:00", + }, + ), + ( + [make_api_task(id="task-id-1", content="Soda", is_completed=False)], + {"description": "6-pack"}, + [ + make_api_task( + id="task-id-1", + content="Soda", + description="6-pack", + is_completed=False, + ) + ], + {"task_id": "task-id-1", "description": "6-pack"}, + { + "uid": "task-id-1", + "summary": "Soda", + "status": "needs_action", + "description": "6-pack", + }, + ), + ], + ids=["rename", "due_date", "due_datetime", "description"], ) -async def test_update_todo_item_summary( +async def test_update_todo_items( hass: HomeAssistant, setup_integration: None, api: AsyncMock, + update_data: dict[str, Any], + tasks_after_update: list[Task], + update_kwargs: dict[str, Any], + expected_item: dict[str, Any], ) -> None: """Test for updating a To-do Item that changes the summary.""" @@ -172,22 +348,29 @@ async def test_update_todo_item_summary( api.update_task = AsyncMock() # Fake API response when state is refreshed after close - api.get_tasks.return_value = [ - make_api_task(id="task-id-1", content="Soda", is_completed=True) - ] + api.get_tasks.return_value = tasks_after_update await hass.services.async_call( TODO_DOMAIN, "update_item", - {"item": "task-id-1", "rename": "Milk"}, + {"item": "task-id-1", **update_data}, target={"entity_id": "todo.name"}, blocking=True, ) assert api.update_task.called args = api.update_task.call_args assert args - assert args.kwargs.get("task_id") == "task-id-1" - assert args.kwargs.get("content") == "Milk" + assert args.kwargs == update_kwargs + + result = await hass.services.async_call( + TODO_DOMAIN, + 
"get_items", + {}, + target={"entity_id": "todo.name"}, + blocking=True, + return_response=True, + ) + assert result == {"todo.name": {"items": [expected_item]}} @pytest.mark.parametrize( @@ -230,3 +413,61 @@ async def test_remove_todo_item( state = hass.states.get("todo.name") assert state assert state.state == "0" + + +@pytest.mark.parametrize( + ("tasks"), [[make_api_task(id="task-id-1", content="Cheese", is_completed=False)]] +) +async def test_subscribe( + hass: HomeAssistant, + setup_integration: None, + api: AsyncMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for subscribing to state updates.""" + + # Subscribe and get the initial list + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "todo/item/subscribe", + "entity_id": "todo.name", + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + subscription_id = msg["id"] + + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Cheese" + assert items[0]["status"] == "needs_action" + assert items[0]["uid"] + + # Fake API response when state is refreshed + api.get_tasks.return_value = [ + make_api_task(id="test-id-1", content="Wine", is_completed=False) + ] + await hass.services.async_call( + TODO_DOMAIN, + "update_item", + {"item": "Cheese", "rename": "Wine"}, + target={"entity_id": "todo.name"}, + blocking=True, + ) + + # Verify update is published + msg = await client.receive_json() + assert msg["id"] == subscription_id + assert msg["type"] == "event" + items = msg["event"].get("items") + assert items + assert len(items) == 1 + assert items[0]["summary"] == "Wine" + assert items[0]["status"] == "needs_action" + assert items[0]["uid"] diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 
7c187c7b4bb..11e73b5695c 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -157,7 +157,7 @@ def test_config_verify_ssl_but_no_ssl_enabled( assert "_http_id=1234567890" in result.req.body assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 - assert mock_session_send.mock_calls[0] == mock.call(result.req, timeout=3) + assert mock_session_send.mock_calls[0] == mock.call(result.req, timeout=60) @mock.patch("os.access", return_value=True) @@ -192,7 +192,7 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 assert mock_session_send.mock_calls[0] == mock.call( - result.req, timeout=3, verify="/test/tomato.crt" + result.req, timeout=60, verify="/test/tomato.crt" ) @@ -223,7 +223,7 @@ def test_config_valid_verify_ssl_bool(hass: HomeAssistant, mock_session_send) -> assert "exec=devlist" in result.req.body assert mock_session_send.call_count == 1 assert mock_session_send.mock_calls[0] == mock.call( - result.req, timeout=3, verify=False + result.req, timeout=60, verify=False ) diff --git a/tests/components/tomorrowio/snapshots/test_weather.ambr b/tests/components/tomorrowio/snapshots/test_weather.ambr index a938cb10e44..fe65925e4c7 100644 --- a/tests/components/tomorrowio/snapshots/test_weather.ambr +++ b/tests/components/tomorrowio/snapshots/test_weather.ambr @@ -1107,3 +1107,1127 @@ ]), }) # --- +# name: test_v4_forecast_service[forecast] + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, 
+ 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 
'condition': 'cloudy', + 'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }), + }) +# --- +# name: test_v4_forecast_service[forecast].1 + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': 
'2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T03:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, + 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 
'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 
'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }), + }) +# --- +# name: test_v4_forecast_service[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 
42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }) +# --- +# name: test_v4_forecast_service[get_forecast].1 + dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 
315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': 
'2021-03-08T03:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, + 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 
'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }) +# --- +# name: test_v4_forecast_service[get_forecasts] + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T11:00:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.9, + 'templow': 26.1, + 'wind_bearing': 239.6, + 'wind_speed': 34.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 49.4, + 'templow': 26.3, + 'wind_bearing': 262.82, + 'wind_speed': 26.06, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-09T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 67.0, + 'templow': 31.5, + 'wind_bearing': 229.3, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-10T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, 
+ 'temperature': 65.3, + 'templow': 37.3, + 'wind_bearing': 149.91, + 'wind_speed': 38.3, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-11T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 66.2, + 'templow': 48.3, + 'wind_bearing': 210.45, + 'wind_speed': 56.48, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2021-03-12T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 67.9, + 'templow': 53.8, + 'wind_bearing': 217.98, + 'wind_speed': 44.28, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-13T11:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 25, + 'temperature': 54.5, + 'templow': 42.9, + 'wind_bearing': 58.79, + 'wind_speed': 34.99, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-14T10:00:00+00:00', + 'precipitation': 0.94, + 'precipitation_probability': 95, + 'temperature': 42.9, + 'templow': 33.4, + 'wind_bearing': 70.25, + 'wind_speed': 58.5, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-15T10:00:00+00:00', + 'precipitation': 0.06, + 'precipitation_probability': 55, + 'temperature': 43.7, + 'templow': 29.4, + 'wind_bearing': 84.47, + 'wind_speed': 57.2, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-16T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 43.0, + 'templow': 29.1, + 'wind_bearing': 103.85, + 'wind_speed': 24.16, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-17T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 52.4, + 'templow': 34.3, + 'wind_bearing': 145.41, + 'wind_speed': 26.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-18T10:00:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 10, + 'temperature': 54.1, + 'templow': 41.3, + 'wind_bearing': 62.99, + 'wind_speed': 23.69, + }), + dict({ + 'condition': 'rainy', + 'datetime': 
'2021-03-19T10:00:00+00:00', + 'precipitation': 0.12, + 'precipitation_probability': 55, + 'temperature': 48.9, + 'templow': 39.4, + 'wind_bearing': 68.54, + 'wind_speed': 50.08, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2021-03-20T10:00:00+00:00', + 'precipitation': 0.05, + 'precipitation_probability': 33, + 'temperature': 40.1, + 'templow': 35.1, + 'wind_bearing': 56.98, + 'wind_speed': 62.46, + }), + ]), + }), + }) +# --- +# name: test_v4_forecast_service[get_forecasts].1 + dict({ + 'weather.tomorrow_io_daily': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T17:48:00+00:00', + 'dew_point': 12.8, + 'humidity': 58, + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.1, + 'wind_bearing': 315.14, + 'wind_speed': 33.59, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T18:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.8, + 'wind_bearing': 321.71, + 'wind_speed': 31.82, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T19:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.8, + 'wind_bearing': 323.38, + 'wind_speed': 32.04, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T20:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 45.3, + 'wind_bearing': 318.43, + 'wind_speed': 33.73, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T21:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 44.6, + 'wind_bearing': 320.9, + 'wind_speed': 28.98, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T22:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 41.9, + 'wind_bearing': 322.11, + 'wind_speed': 15.7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-07T23:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 
0, + 'temperature': 38.9, + 'wind_bearing': 295.94, + 'wind_speed': 17.78, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T00:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 36.2, + 'wind_bearing': 11.94, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2021-03-08T01:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 34.3, + 'wind_bearing': 13.68, + 'wind_speed': 20.05, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T02:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 32.9, + 'wind_bearing': 14.93, + 'wind_speed': 19.48, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T03:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.9, + 'wind_bearing': 26.07, + 'wind_speed': 16.6, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T04:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 51.27, + 'wind_speed': 9.32, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T05:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.4, + 'wind_bearing': 343.25, + 'wind_speed': 11.92, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T06:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.7, + 'wind_bearing': 341.46, + 'wind_speed': 15.37, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T07:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.4, + 'wind_bearing': 322.34, + 'wind_speed': 12.71, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T08:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 26.1, + 'wind_bearing': 294.69, + 
'wind_speed': 13.14, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T09:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 30.1, + 'wind_bearing': 325.32, + 'wind_speed': 11.52, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T10:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 31.0, + 'wind_bearing': 322.27, + 'wind_speed': 10.22, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T11:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 27.2, + 'wind_bearing': 310.14, + 'wind_speed': 20.12, + }), + dict({ + 'condition': 'clear-night', + 'datetime': '2021-03-08T12:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 29.2, + 'wind_bearing': 324.8, + 'wind_speed': 25.38, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T13:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 33.2, + 'wind_bearing': 335.16, + 'wind_speed': 23.26, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T14:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 37.0, + 'wind_bearing': 324.49, + 'wind_speed': 21.17, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2021-03-08T15:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 40.0, + 'wind_bearing': 310.68, + 'wind_speed': 19.98, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2021-03-08T16:48:00+00:00', + 'precipitation': 0.0, + 'precipitation_probability': 0, + 'temperature': 42.4, + 'wind_bearing': 304.18, + 'wind_speed': 19.66, + }), + ]), + }), + }) +# --- diff --git a/tests/components/tomorrowio/test_weather.py b/tests/components/tomorrowio/test_weather.py index 863623ee524..e715fccea6b 100644 --- a/tests/components/tomorrowio/test_weather.py +++ 
b/tests/components/tomorrowio/test_weather.py @@ -46,7 +46,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_SPEED, ATTR_WEATHER_WIND_SPEED_UNIT, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, SOURCE_USER from homeassistant.const import ATTR_ATTRIBUTION, ATTR_FRIENDLY_NAME, CONF_NAME @@ -277,10 +278,18 @@ async def test_v4_weather_legacy_entities(hass: HomeAssistant) -> None: assert weather_state.attributes[ATTR_WEATHER_WIND_SPEED_UNIT] == "km/h" +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @freeze_time(datetime(2021, 3, 6, 23, 59, 59, tzinfo=dt_util.UTC)) async def test_v4_forecast_service( hass: HomeAssistant, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test multiple forecast.""" weather_state = await _setup(hass, API_V4_ENTRY_DATA) @@ -289,7 +298,7 @@ async def test_v4_forecast_service( for forecast_type in ("daily", "hourly"): response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": entity_id, "type": forecast_type, @@ -297,10 +306,40 @@ async def test_v4_forecast_service( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot +async def test_legacy_v4_bad_forecast( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + tomorrowio_config_entry_update, + snapshot: SnapshotAssertion, +) -> None: + """Test bad forecast data.""" + freezer.move_to(datetime(2021, 3, 6, 23, 59, 59, tzinfo=dt_util.UTC)) + + weather_state = await _setup(hass, API_V4_ENTRY_DATA) + entity_id = weather_state.entity_id + hourly_forecast = tomorrowio_config_entry_update.return_value["forecasts"]["hourly"] + hourly_forecast[0]["values"]["precipitationProbability"] = "blah" + + # Trigger data refetch + freezer.tick(timedelta(minutes=32) + timedelta(seconds=1)) + 
await hass.async_block_till_done() + + response = await hass.services.async_call( + WEATHER_DOMAIN, + LEGACY_SERVICE_GET_FORECAST, + { + "entity_id": entity_id, + "type": "hourly", + }, + blocking=True, + return_response=True, + ) + assert response["forecast"][0]["precipitation_probability"] is None + + async def test_v4_bad_forecast( hass: HomeAssistant, freezer: FrozenDateTimeFactory, @@ -321,7 +360,7 @@ async def test_v4_bad_forecast( response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, { "entity_id": entity_id, "type": "hourly", @@ -329,7 +368,12 @@ async def test_v4_bad_forecast( blocking=True, return_response=True, ) - assert response["forecast"][0]["precipitation_probability"] is None + assert ( + response["weather.tomorrow_io_daily"]["forecast"][0][ + "precipitation_probability" + ] + is None + ) @pytest.mark.parametrize("forecast_type", ["daily", "hourly"]) diff --git a/tests/components/trafikverket_camera/__init__.py b/tests/components/trafikverket_camera/__init__.py index 026c122fb57..a9aa3ad70d1 100644 --- a/tests/components/trafikverket_camera/__init__.py +++ b/tests/components/trafikverket_camera/__init__.py @@ -2,9 +2,14 @@ from __future__ import annotations from homeassistant.components.trafikverket_camera.const import CONF_LOCATION -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_ID ENTRY_CONFIG = { + CONF_API_KEY: "1234567890", + CONF_ID: "1234", +} + +ENTRY_CONFIG_OLD_CONFIG = { CONF_API_KEY: "1234567890", CONF_LOCATION: "Test location", } diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index a4902ac2950..a5eeb707b34 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -32,9 +32,9 @@ async def load_integration_from_entry( source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, 
unique_id="trafikverket_camera-1234", - title="Test location", + title="Test Camera", ) config_entry.add_to_hass(hass) @@ -54,7 +54,7 @@ def fixture_get_camera() -> CameraInfo: """Construct Camera Mock.""" return CameraInfo( - camera_name="Test_camera", + camera_name="Test Camera", camera_id="1234", active=True, deleted=False, diff --git a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6f7eb540289..87d0e6d58b7 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -16,5 +16,5 @@ async def test_sensor( ) -> None: """Test the Trafikverket Camera binary sensor.""" - state = hass.states.get("binary_sensor.test_location_active") + state = hass.states.get("binary_sensor.test_camera_active") assert state.state == STATE_ON diff --git a/tests/components/trafikverket_camera/test_camera.py b/tests/components/trafikverket_camera/test_camera.py index b3df7cfcdcb..182924e9f0e 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -26,7 +26,7 @@ async def test_camera( get_camera: CameraInfo, ) -> None: """Test the Trafikverket Camera sensor.""" - state1 = hass.states.get("camera.test_location") + state1 = hass.states.get("camera.test_camera") assert state1.state == "idle" assert state1.attributes["description"] == "Test Camera for testing" assert state1.attributes["location"] == "Test location" @@ -44,11 +44,11 @@ async def test_camera( async_fire_time_changed(hass) await hass.async_block_till_done() - state1 = hass.states.get("camera.test_location") + state1 = hass.states.get("camera.test_camera") assert state1.state == "idle" assert state1.attributes != {} - assert await async_get_image(hass, "camera.test_location") + assert await async_get_image(hass, "camera.test_camera") monkeypatch.setattr( get_camera, @@ -69,4 +69,4 @@ async def test_camera( 
await hass.async_block_till_done() with pytest.raises(HomeAssistantError): - await async_get_image(hass, "camera.test_location") + await async_get_image(hass, "camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index b53763c0ac7..305066832e5 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -14,7 +14,7 @@ from pytrafikverket.trafikverket_camera import CameraInfo from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import CONF_LOCATION, DOMAIN -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -47,10 +47,10 @@ async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: await hass.async_block_till_done() assert result2["type"] == FlowResultType.CREATE_ENTRY - assert result2["title"] == "Test location" + assert result2["title"] == "Test Camera" assert result2["data"] == { "api_key": "1234567890", - "location": "Test location", + "id": "1234", } assert len(mock_setup_entry.mock_calls) == 1 assert result2["result"].unique_id == "trafikverket_camera-1234" @@ -87,7 +87,7 @@ async def test_form_no_location_data( assert result2["title"] == "Test Camera" assert result2["data"] == { "api_key": "1234567890", - "location": "Test Camera", + "id": "1234", } assert len(mock_setup_entry.mock_calls) == 1 assert result2["result"].unique_id == "trafikverket_camera-1234" @@ -150,10 +150,10 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: domain=DOMAIN, data={ CONF_API_KEY: "1234567890", - CONF_LOCATION: "Test location", + CONF_ID: "1234", }, unique_id="1234", - version=2, + version=3, ) entry.add_to_hass(hass) @@ -186,7 +186,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> 
None: assert result2["reason"] == "reauth_successful" assert entry.data == { "api_key": "1234567891", - "location": "Test location", + "id": "1234", } @@ -223,10 +223,10 @@ async def test_reauth_flow_error( domain=DOMAIN, data={ CONF_API_KEY: "1234567890", - CONF_LOCATION: "Test location", + CONF_ID: "1234", }, unique_id="1234", - version=2, + version=3, ) entry.add_to_hass(hass) await hass.async_block_till_done() @@ -271,5 +271,5 @@ async def test_reauth_flow_error( assert result2["reason"] == "reauth_successful" assert entry.data == { "api_key": "1234567891", - "location": "Test location", + "id": "1234", } diff --git a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index 4183aa9fffa..0f79307e0b6 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ b/tests/components/trafikverket_camera/test_coordinator.py @@ -40,9 +40,9 @@ async def test_coordinator( source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, unique_id="trafikverket_camera-1234", - title="Test location", + title="Test Camera", ) entry.add_to_hass(hass) @@ -54,7 +54,7 @@ async def test_coordinator( await hass.async_block_till_done() mock_data.assert_called_once() - state1 = hass.states.get("camera.test_location") + state1 = hass.states.get("camera.test_camera") assert state1.state == "idle" @@ -101,9 +101,9 @@ async def test_coordinator_failed_update( source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, unique_id="trafikverket_camera-1234", - title="Test location", + title="Test Camera", ) entry.add_to_hass(hass) @@ -115,7 +115,7 @@ async def test_coordinator_failed_update( await hass.async_block_till_done() mock_data.assert_called_once() - state = hass.states.get("camera.test_location") + state = hass.states.get("camera.test_camera") assert state is None assert entry.state == entry_state @@ -135,7 +135,7 @@ async def test_coordinator_failed_get_image( 
source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, unique_id="trafikverket_camera-1234", title="Test location", ) @@ -149,6 +149,6 @@ async def test_coordinator_failed_get_image( await hass.async_block_till_done() mock_data.assert_called_once() - state = hass.states.get("camera.test_location") + state = hass.states.get("camera.test_camera") assert state is None assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index 83a3fc1486a..e10c6c16e33 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import datetime from unittest.mock import patch +import pytest from pytrafikverket.exceptions import UnknownError from pytrafikverket.trafikverket_camera import CameraInfo @@ -14,7 +15,7 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from . import ENTRY_CONFIG +from . 
import ENTRY_CONFIG, ENTRY_CONFIG_OLD_CONFIG from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -35,9 +36,9 @@ async def test_setup_entry( source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, unique_id="trafikverket_camera-1234", - title="Test location", + title="Test Camera", ) entry.add_to_hass(hass) @@ -67,9 +68,9 @@ async def test_unload_entry( source=SOURCE_USER, data=ENTRY_CONFIG, entry_id="1", - version=2, + version=3, unique_id="trafikverket_camera-1234", - title="Test location", + title="Test Camera", ) entry.add_to_hass(hass) @@ -99,7 +100,7 @@ async def test_migrate_entry( entry = MockConfigEntry( domain=DOMAIN, source=SOURCE_USER, - data=ENTRY_CONFIG, + data=ENTRY_CONFIG_OLD_CONFIG, entry_id="1", unique_id="trafikverket_camera-Test location", title="Test location", @@ -114,15 +115,31 @@ async def test_migrate_entry( await hass.async_block_till_done() assert entry.state is config_entries.ConfigEntryState.LOADED - assert entry.version == 2 + assert entry.version == 3 assert entry.unique_id == "trafikverket_camera-1234" - assert len(mock_tvt_camera.mock_calls) == 2 + assert entry.data == ENTRY_CONFIG + assert len(mock_tvt_camera.mock_calls) == 3 +@pytest.mark.parametrize( + ("version", "unique_id"), + [ + ( + 1, + "trafikverket_camera-Test location", + ), + ( + 2, + "trafikverket_camera-1234", + ), + ], +) async def test_migrate_entry_fails_with_error( hass: HomeAssistant, get_camera: CameraInfo, aioclient_mock: AiohttpClientMocker, + version: int, + unique_id: str, ) -> None: """Test migrate entry fails with api error.""" aioclient_mock.get( @@ -132,9 +149,10 @@ async def test_migrate_entry_fails_with_error( entry = MockConfigEntry( domain=DOMAIN, source=SOURCE_USER, - data=ENTRY_CONFIG, + data=ENTRY_CONFIG_OLD_CONFIG, entry_id="1", - unique_id="trafikverket_camera-Test location", + version=version, + unique_id=unique_id, title="Test location", ) entry.add_to_hass(hass) @@ -147,14 
+165,29 @@ async def test_migrate_entry_fails_with_error( await hass.async_block_till_done() assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR - assert entry.version == 1 - assert entry.unique_id == "trafikverket_camera-Test location" + assert entry.version == version + assert entry.unique_id == unique_id assert len(mock_tvt_camera.mock_calls) == 1 +@pytest.mark.parametrize( + ("version", "unique_id"), + [ + ( + 1, + "trafikverket_camera-Test location", + ), + ( + 2, + "trafikverket_camera-1234", + ), + ], +) async def test_migrate_entry_fails_no_id( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, + version: int, + unique_id: str, ) -> None: """Test migrate entry fails, camera returns no id.""" aioclient_mock.get( @@ -164,9 +197,10 @@ async def test_migrate_entry_fails_no_id( entry = MockConfigEntry( domain=DOMAIN, source=SOURCE_USER, - data=ENTRY_CONFIG, + data=ENTRY_CONFIG_OLD_CONFIG, entry_id="1", - unique_id="trafikverket_camera-Test location", + version=version, + unique_id=unique_id, title="Test location", ) entry.add_to_hass(hass) @@ -195,8 +229,8 @@ async def test_migrate_entry_fails_no_id( await hass.async_block_till_done() assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR - assert entry.version == 1 - assert entry.unique_id == "trafikverket_camera-Test location" + assert entry.version == version + assert entry.unique_id == unique_id assert len(mock_tvt_camera.mock_calls) == 1 @@ -214,7 +248,7 @@ async def test_no_migration_needed( domain=DOMAIN, source=SOURCE_USER, data=ENTRY_CONFIG, - version=2, + version=3, entry_id="1234", unique_id="trafikverket_camera-1234", title="Test location", diff --git a/tests/components/trafikverket_camera/test_recorder.py b/tests/components/trafikverket_camera/test_recorder.py index b9add7ae483..777c6ea26b3 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -24,7 +24,7 @@ async def 
test_exclude_attributes( get_camera: CameraInfo, ) -> None: """Test camera has description and location excluded from recording.""" - state1 = hass.states.get("camera.test_location") + state1 = hass.states.get("camera.test_camera") assert state1.state == "idle" assert state1.attributes["description"] == "Test Camera for testing" assert state1.attributes["location"] == "Test location" @@ -39,10 +39,10 @@ async def test_exclude_attributes( hass.states.async_entity_ids(), ) assert len(states) == 8 - assert states.get("camera.test_location") + assert states.get("camera.test_camera") for entity_states in states.values(): for state in entity_states: - if state.entity_id == "camera.test_location": + if state.entity_id == "camera.test_camera": assert "location" not in state.attributes assert "description" not in state.attributes assert "type" in state.attributes diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 581fed1d289..c1c98aed797 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -15,15 +15,15 @@ async def test_sensor( ) -> None: """Test the Trafikverket Camera sensor.""" - state = hass.states.get("sensor.test_location_direction") + state = hass.states.get("sensor.test_camera_direction") assert state.state == "180" - state = hass.states.get("sensor.test_location_modified") + state = hass.states.get("sensor.test_camera_modified") assert state.state == "2022-04-04T04:04:04+00:00" - state = hass.states.get("sensor.test_location_photo_time") + state = hass.states.get("sensor.test_camera_photo_time") assert state.state == "2022-04-04T04:04:04+00:00" - state = hass.states.get("sensor.test_location_photo_url") + state = hass.states.get("sensor.test_camera_photo_url") assert state.state == "https://www.testurl.com/test_photo.jpg" - state = hass.states.get("sensor.test_location_status") + state = 
hass.states.get("sensor.test_camera_status") assert state.state == "Running" - state = hass.states.get("sensor.test_location_camera_type") + state = hass.states.get("sensor.test_camera_camera_type") assert state.state == "Road" diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 3493e031669..1accd4b5a55 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -6,7 +6,6 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import ( InvalidAuthentication, - MultipleTrainAnnouncementFound, MultipleTrainStationsFound, NoTrainAnnouncementFound, NoTrainStationFound, @@ -177,10 +176,6 @@ async def test_flow_fails( NoTrainAnnouncementFound, "no_trains", ), - ( - MultipleTrainAnnouncementFound, - "multiple_trains", - ), ( UnknownError, "cannot_connect", @@ -371,10 +366,6 @@ async def test_reauth_flow_error( NoTrainAnnouncementFound, "no_trains", ), - ( - MultipleTrainAnnouncementFound, - "multiple_trains", - ), ( UnknownError, "cannot_connect", diff --git a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index 36c30b33b53..e55e04d8411 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -15,6 +15,8 @@ from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + DOMAIN = "trafikverket_weatherstation" CONF_STATION = "station" @@ -97,3 +99,103 @@ async def test_flow_fails( ) assert result4["errors"] == {"base": base_error} + + +async def test_reauth_flow(hass: HomeAssistant) -> None: + """Test a reauthentication flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: 
"1234567890", + CONF_STATION: "Vallby", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) + assert result["step_id"] == "reauth_confirm" + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", + ), patch( + "homeassistant.components.trafikverket_weatherstation.async_setup_entry", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert entry.data == {"api_key": "1234567891", "station": "Vallby"} + + +@pytest.mark.parametrize( + ("side_effect", "base_error"), + [ + ( + InvalidAuthentication, + "invalid_auth", + ), + ( + NoWeatherStationFound, + "invalid_station", + ), + ( + MultipleWeatherStationsFound, + "more_stations", + ), + ( + Exception, + "cannot_connect", + ), + ], +) +async def test_reauth_flow_fails( + hass: HomeAssistant, side_effect: Exception, base_error: str +) -> None: + """Test a reauthentication flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_API_KEY: "1234567890", + CONF_STATION: "Vallby", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": entry.entry_id, + }, + data=entry.data, + ) + assert result["step_id"] == "reauth_confirm" + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + + with patch( + "homeassistant.components.trafikverket_weatherstation.config_flow.TrafikverketWeather.async_get_weather", + side_effect=side_effect(), + ): + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "1234567891"}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": base_error} diff --git a/tests/components/trend/test_binary_sensor.py b/tests/components/trend/test_binary_sensor.py index cccf1add61b..ddd980ae970 100644 --- a/tests/components/trend/test_binary_sensor.py +++ b/tests/components/trend/test_binary_sensor.py @@ -1,5 +1,6 @@ """The test for the Trend sensor platform.""" from datetime import timedelta +import logging from unittest.mock import patch import pytest @@ -68,6 +69,7 @@ class TestTrendBinarySensor: "sample_duration": 10000, "min_gradient": 1, "max_samples": 25, + "min_samples": 5, } }, } @@ -76,24 +78,35 @@ class TestTrendBinarySensor: self.hass.block_till_done() now = dt_util.utcnow() + + # add not enough states to trigger calculation for val in [10, 0, 20, 30]: with patch("homeassistant.util.dt.utcnow", return_value=now): self.hass.states.set("sensor.test_state", val) self.hass.block_till_done() now += timedelta(seconds=2) - state = self.hass.states.get("binary_sensor.test_trend_sensor") - assert state.state == "on" + assert ( + self.hass.states.get("binary_sensor.test_trend_sensor").state == "unknown" + ) - # have to change state value, otherwise sample will lost + # add one more state to trigger gradient calculation + for val in [100]: + with patch("homeassistant.util.dt.utcnow", return_value=now): + self.hass.states.set("sensor.test_state", val) + self.hass.block_till_done() + now += timedelta(seconds=2) + + assert self.hass.states.get("binary_sensor.test_trend_sensor").state == "on" + + # add more states to trigger a downtrend for val in [0, 30, 1, 0]: with patch("homeassistant.util.dt.utcnow", return_value=now): self.hass.states.set("sensor.test_state", val) self.hass.block_till_done() now += timedelta(seconds=2) - state = self.hass.states.get("binary_sensor.test_trend_sensor") - 
assert state.state == "off" + assert self.hass.states.get("binary_sensor.test_trend_sensor").state == "off" def test_down_using_trendline(self): """Test down trend using multiple samples and trendline calculation.""" @@ -434,10 +447,72 @@ async def test_restore_state( { "binary_sensor": { "platform": "trend", - "sensors": {"test_trend_sensor": {"entity_id": "sensor.test_state"}}, + "sensors": { + "test_trend_sensor": { + "entity_id": "sensor.test_state", + "sample_duration": 10000, + "min_gradient": 1, + "max_samples": 25, + "min_samples": 5, + } + }, } }, ) await hass.async_block_till_done() + # restored sensor should match saved one assert hass.states.get("binary_sensor.test_trend_sensor").state == restored_state + + now = dt_util.utcnow() + + # add not enough samples to trigger calculation + for val in [10, 20, 30, 40]: + with patch("homeassistant.util.dt.utcnow", return_value=now): + hass.states.async_set("sensor.test_state", val) + await hass.async_block_till_done() + now += timedelta(seconds=2) + + # state should match restored state as no calculation happened + assert hass.states.get("binary_sensor.test_trend_sensor").state == restored_state + + # add more samples to trigger calculation + for val in [50, 60, 70, 80]: + with patch("homeassistant.util.dt.utcnow", return_value=now): + hass.states.async_set("sensor.test_state", val) + await hass.async_block_till_done() + now += timedelta(seconds=2) + + # sensor should detect an upwards trend and turn on + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" + + +async def test_invalid_min_sample( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test if error is logged when min_sample is larger than max_samples.""" + with caplog.at_level(logging.ERROR): + assert await setup.async_setup_component( + hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "trend", + "sensors": { + "test_trend_sensor": { + "entity_id": "sensor.test_state", + "max_samples": 25, + 
"min_samples": 30, + } + }, + } + }, + ) + await hass.async_block_till_done() + + record = caplog.records[0] + assert record.levelname == "ERROR" + assert ( + "Invalid config for 'binary_sensor.trend': min_samples must be smaller than or equal to max_samples" + in record.message + ) diff --git a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index 30a1b3e08ff..8e6dce71160 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -75,3 +75,89 @@ async def test_restart_device_button( # Controller reconnects await websocket_mock.reconnect() assert hass.states.get("button.switch_restart").state != STATE_UNAVAILABLE + + +async def test_power_cycle_poe( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, websocket_mock +) -> None: + """Test restarting device button.""" + config_entry = await setup_unifi_integration( + hass, + aioclient_mock, + devices_response=[ + { + "board_rev": 3, + "device_id": "mock-id", + "ip": "10.0.0.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "switch", + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + "port_table": [ + { + "media": "GE", + "name": "Port 1", + "port_idx": 1, + "poe_caps": 7, + "poe_class": "Class 4", + "poe_enable": True, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a1", + "port_poe": True, + "up": True, + }, + ], + } + ], + ) + controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id] + + assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == 2 + + ent_reg = er.async_get(hass) + ent_reg_entry = ent_reg.async_get("button.switch_port_1_power_cycle") + assert ent_reg_entry.unique_id == "power_cycle-00:00:00:00:01:01_1" + assert ent_reg_entry.entity_category is EntityCategory.CONFIG + + # Validate state object + button = hass.states.get("button.switch_port_1_power_cycle") + assert button is not None + assert 
button.attributes.get(ATTR_DEVICE_CLASS) == ButtonDeviceClass.RESTART + + # Send restart device command + aioclient_mock.clear_requests() + aioclient_mock.post( + f"https://{controller.host}:1234/api/s/{controller.site}/cmd/devmgr", + ) + + await hass.services.async_call( + BUTTON_DOMAIN, + "press", + {"entity_id": "button.switch_port_1_power_cycle"}, + blocking=True, + ) + assert aioclient_mock.call_count == 1 + assert aioclient_mock.mock_calls[0][2] == { + "cmd": "power-cycle", + "mac": "00:00:00:00:01:01", + "port_idx": 1, + } + + # Availability signalling + + # Controller disconnects + await websocket_mock.disconnect() + assert ( + hass.states.get("button.switch_port_1_power_cycle").state == STATE_UNAVAILABLE + ) + + # Controller reconnects + await websocket_mock.reconnect() + assert ( + hass.states.get("button.switch_port_1_power_cycle").state != STATE_UNAVAILABLE + ) diff --git a/tests/components/unifi/test_controller.py b/tests/components/unifi/test_controller.py index 9d4bde2d016..268f4e8493a 100644 --- a/tests/components/unifi/test_controller.py +++ b/tests/components/unifi/test_controller.py @@ -167,6 +167,11 @@ def mock_default_unifi_requests( json={"data": wlans_response or [], "meta": {"rc": "ok"}}, headers={"content-type": CONTENT_TYPE_JSON}, ) + aioclient_mock.get( + f"https://{host}:1234/v2/api/site/{site_id}/trafficroutes", + json=[{}], + headers={"content-type": CONTENT_TYPE_JSON}, + ) aioclient_mock.get( f"https://{host}:1234/v2/api/site/{site_id}/trafficrules", json=[{}], @@ -460,6 +465,7 @@ async def test_get_unifi_controller_verify_ssl_false(hass: HomeAssistant) -> Non (aiounifi.RequestError, CannotConnect), (aiounifi.ResponseError, CannotConnect), (aiounifi.Unauthorized, AuthenticationRequired), + (aiounifi.Forbidden, AuthenticationRequired), (aiounifi.LoginRequired, AuthenticationRequired), (aiounifi.AiounifiException, AuthenticationRequired), ], diff --git a/tests/components/unifi/test_device_tracker.py 
b/tests/components/unifi/test_device_tracker.py index cbff868d9a6..abe12a1e243 100644 --- a/tests/components/unifi/test_device_tracker.py +++ b/tests/components/unifi/test_device_tracker.py @@ -939,13 +939,20 @@ async def test_restoring_client( ) registry = er.async_get(hass) - registry.async_get_or_create( + registry.async_get_or_create( # Unique ID updated TRACKER_DOMAIN, UNIFI_DOMAIN, f'{restored["mac"]}-site_id', suggested_object_id=restored["hostname"], config_entry=config_entry, ) + registry.async_get_or_create( # Unique ID already updated + TRACKER_DOMAIN, + UNIFI_DOMAIN, + f'site_id-{client["mac"]}', + suggested_object_id=client["hostname"], + config_entry=config_entry, + ) await setup_unifi_integration( hass, diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index cfcfbe6c3ed..00ebcd0e683 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -5,6 +5,7 @@ from datetime import timedelta from aiounifi.models.message import MessageKey import pytest +from homeassistant import config_entries from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -32,7 +33,12 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .test_controller import CONTROLLER_HOST, SITE, setup_unifi_integration +from .test_controller import ( + CONTROLLER_HOST, + ENTRY_CONFIG, + SITE, + setup_unifi_integration, +) from tests.common import async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker @@ -771,7 +777,6 @@ async def test_no_clients( }, ) - assert aioclient_mock.call_count == 12 assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 @@ -852,7 +857,7 @@ async def test_switches( assert ent_reg.async_get(entry_id).entity_category is EntityCategory.CONFIG # Block and unblock client - + 
aioclient_mock.clear_requests() aioclient_mock.post( f"https://{controller.host}:1234/api/s/{controller.site}/cmd/stamgr", ) @@ -860,8 +865,8 @@ async def test_switches( await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {"entity_id": "switch.block_client_1"}, blocking=True ) - assert aioclient_mock.call_count == 13 - assert aioclient_mock.mock_calls[12][2] == { + assert aioclient_mock.call_count == 1 + assert aioclient_mock.mock_calls[0][2] == { "mac": "00:00:00:00:01:01", "cmd": "block-sta", } @@ -869,14 +874,14 @@ async def test_switches( await hass.services.async_call( SWITCH_DOMAIN, "turn_on", {"entity_id": "switch.block_client_1"}, blocking=True ) - assert aioclient_mock.call_count == 14 - assert aioclient_mock.mock_calls[13][2] == { + assert aioclient_mock.call_count == 2 + assert aioclient_mock.mock_calls[1][2] == { "mac": "00:00:00:00:01:01", "cmd": "unblock-sta", } # Enable and disable DPI - + aioclient_mock.clear_requests() aioclient_mock.put( f"https://{controller.host}:1234/api/s/{controller.site}/rest/dpiapp/5f976f62e3c58f018ec7e17d", ) @@ -887,8 +892,8 @@ async def test_switches( {"entity_id": "switch.block_media_streaming"}, blocking=True, ) - assert aioclient_mock.call_count == 15 - assert aioclient_mock.mock_calls[14][2] == {"enabled": False} + assert aioclient_mock.call_count == 1 + assert aioclient_mock.mock_calls[0][2] == {"enabled": False} await hass.services.async_call( SWITCH_DOMAIN, @@ -896,8 +901,8 @@ async def test_switches( {"entity_id": "switch.block_media_streaming"}, blocking=True, ) - assert aioclient_mock.call_count == 16 - assert aioclient_mock.mock_calls[15][2] == {"enabled": True} + assert aioclient_mock.call_count == 2 + assert aioclient_mock.mock_calls[1][2] == {"enabled": True} async def test_remove_switches( @@ -976,6 +981,7 @@ async def test_block_switches( assert blocked is not None assert blocked.state == "off" + aioclient_mock.clear_requests() aioclient_mock.post( 
f"https://{controller.host}:1234/api/s/{controller.site}/cmd/stamgr", ) @@ -983,8 +989,8 @@ async def test_block_switches( await hass.services.async_call( SWITCH_DOMAIN, "turn_off", {"entity_id": "switch.block_client_1"}, blocking=True ) - assert aioclient_mock.call_count == 13 - assert aioclient_mock.mock_calls[12][2] == { + assert aioclient_mock.call_count == 1 + assert aioclient_mock.mock_calls[0][2] == { "mac": "00:00:00:00:01:01", "cmd": "block-sta", } @@ -992,8 +998,8 @@ async def test_block_switches( await hass.services.async_call( SWITCH_DOMAIN, "turn_on", {"entity_id": "switch.block_client_1"}, blocking=True ) - assert aioclient_mock.call_count == 14 - assert aioclient_mock.mock_calls[13][2] == { + assert aioclient_mock.call_count == 2 + assert aioclient_mock.mock_calls[1][2] == { "mac": "00:00:00:00:01:01", "cmd": "unblock-sta", } @@ -1585,3 +1591,70 @@ async def test_port_forwarding_switches( mock_unifi_websocket(message=MessageKey.PORT_FORWARD_DELETED, data=_data) await hass.async_block_till_done() assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 + + +async def test_updating_unique_id( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Verify outlet control and poe control unique ID update works.""" + poe_device = { + "board_rev": 3, + "device_id": "mock-id", + "ip": "10.0.0.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "switch", + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + "port_table": [ + { + "media": "GE", + "name": "Port 1", + "port_idx": 1, + "poe_caps": 7, + "poe_class": "Class 4", + "poe_enable": True, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a1", + "port_poe": True, + "up": True, + }, + ], + } + + config_entry = config_entries.ConfigEntry( + version=1, + domain=UNIFI_DOMAIN, + title="Mock Title", + data=ENTRY_CONFIG, + source="test", + options={}, + entry_id="1", + ) + + registry = 
er.async_get(hass) + registry.async_get_or_create( + SWITCH_DOMAIN, + UNIFI_DOMAIN, + f'{poe_device["mac"]}-poe-1', + suggested_object_id="switch_port_1_poe", + config_entry=config_entry, + ) + registry.async_get_or_create( + SWITCH_DOMAIN, + UNIFI_DOMAIN, + f'{OUTLET_UP1["mac"]}-outlet-1', + suggested_object_id="plug_outlet_1", + config_entry=config_entry, + ) + + await setup_unifi_integration( + hass, aioclient_mock, devices_response=[poe_device, OUTLET_UP1] + ) + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 + assert hass.states.get("switch.switch_port_1_poe") + assert hass.states.get("switch.plug_outlet_1") diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 0952b14303d..db166144925 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -1,6 +1,7 @@ """Configuration for SSDP tests.""" from __future__ import annotations +import copy from datetime import datetime from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse @@ -26,6 +27,7 @@ TEST_UDN = "uuid:device" TEST_ST = "urn:schemas-upnp-org:device:InternetGatewayDevice:1" TEST_USN = f"{TEST_UDN}::{TEST_ST}" TEST_LOCATION = "http://192.168.1.1/desc.xml" +TEST_LOCATION6 = "http://[fe80::1%2]/desc.xml" TEST_HOST = urlparse(TEST_LOCATION).hostname TEST_FRIENDLY_NAME = "mock-name" TEST_MAC_ADDRESS = "00:11:22:33:44:55" @@ -48,11 +50,23 @@ TEST_DISCOVERY = ssdp.SsdpServiceInfo( ssdp_headers={ "_host": TEST_HOST, }, + ssdp_all_locations={ + TEST_LOCATION, + }, ) +@pytest.fixture +def mock_async_create_device(): + """Mock async_upnp_client create device.""" + with patch( + "homeassistant.components.upnp.device.UpnpFactory.async_create_device" + ) as mock_create: + yield mock_create + + @pytest.fixture(autouse=True) -def mock_igd_device() -> IgdDevice: +def mock_igd_device(mock_async_create_device) -> IgdDevice: """Mock async_upnp_client device.""" mock_upnp_device = 
create_autospec(UpnpDevice, instance=True) mock_upnp_device.device_url = TEST_DISCOVERY.ssdp_location @@ -85,8 +99,6 @@ def mock_igd_device() -> IgdDevice: ) with patch( - "homeassistant.components.upnp.device.UpnpFactory.async_create_device" - ), patch( "homeassistant.components.upnp.device.IgdDevice.__new__", return_value=mock_igd_device, ): @@ -131,16 +143,16 @@ async def silent_ssdp_scanner(hass): ), patch("homeassistant.components.ssdp.Scanner._async_stop_ssdp_listeners"), patch( "homeassistant.components.ssdp.Scanner.async_scan" ), patch( - "homeassistant.components.ssdp.Server._async_start_upnp_servers" + "homeassistant.components.ssdp.Server._async_start_upnp_servers", ), patch( - "homeassistant.components.ssdp.Server._async_stop_upnp_servers" + "homeassistant.components.ssdp.Server._async_stop_upnp_servers", ): yield @pytest.fixture async def ssdp_instant_discovery(): - """Instance discovery.""" + """Instant discovery.""" # Set up device discovery callback. async def register_callback(hass, callback, match_dict): @@ -158,6 +170,30 @@ async def ssdp_instant_discovery(): yield (mock_register, mock_get_info) +@pytest.fixture +async def ssdp_instant_discovery_multi_location(): + """Instant discovery.""" + + test_discovery = copy.deepcopy(TEST_DISCOVERY) + test_discovery.ssdp_location = TEST_LOCATION6 # "Default" location is IPv6. + test_discovery.ssdp_all_locations = {TEST_LOCATION6, TEST_LOCATION} + + # Set up device discovery callback. 
+ async def register_callback(hass, callback, match_dict): + """Immediately do callback.""" + await callback(test_discovery, ssdp.SsdpChange.ALIVE) + return MagicMock() + + with patch( + "homeassistant.components.ssdp.async_register_callback", + side_effect=register_callback, + ) as mock_register, patch( + "homeassistant.components.ssdp.async_get_discovery_info_by_st", + return_value=[test_discovery], + ) as mock_get_info: + yield (mock_register, mock_get_info) + + @pytest.fixture async def ssdp_no_discovery(): """No discovery.""" @@ -197,6 +233,8 @@ async def mock_config_entry( CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, ) + + # Store igd_device for binary_sensor/sensor tests. entry.igd_device = mock_igd_device # Load config_entry. diff --git a/tests/components/upnp/test_config_flow.py b/tests/components/upnp/test_config_flow.py index 4c69b6f6875..7c542e33c9d 100644 --- a/tests/components/upnp/test_config_flow.py +++ b/tests/components/upnp/test_config_flow.py @@ -134,6 +134,7 @@ async def test_flow_ssdp_non_igd_device(hass: HomeAssistant) -> None: ssdp_usn=TEST_USN, ssdp_st=TEST_ST, ssdp_location=TEST_LOCATION, + ssdp_all_locations=[TEST_LOCATION], upnp={ ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-upnp-org:device:WFADevice:1", # Non-IGD ssdp.ATTR_UPNP_UDN: TEST_UDN, @@ -324,6 +325,7 @@ async def test_flow_ssdp_discovery_changed_location(hass: HomeAssistant) -> None new_location = TEST_DISCOVERY.ssdp_location + "2" new_discovery = deepcopy(TEST_DISCOVERY) new_discovery.ssdp_location = new_location + new_discovery.ssdp_all_locations = {new_location} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_SSDP}, diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index e775757cb1f..d1d3dfa6c35 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -1,6 +1,8 @@ """Test UPnP/IGD setup process.""" from __future__ import annotations +from unittest.mock 
import AsyncMock + import pytest from homeassistant.components.upnp.const import ( @@ -60,3 +62,35 @@ async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> # Load config_entry. entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) is True + + +@pytest.mark.usefixtures( + "ssdp_instant_discovery_multi_location", + "mock_get_source_ip", + "mock_mac_address_from_host", +) +async def test_async_setup_entry_multi_location( + hass: HomeAssistant, mock_async_create_device: AsyncMock +) -> None: + """Test async_setup_entry for a device both seen via IPv4 and IPv6. + + The resulting IPv4 location is preferred/stored. + """ + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USN, + data={ + CONFIG_ENTRY_ST: TEST_ST, + CONFIG_ENTRY_UDN: TEST_UDN, + CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, + CONFIG_ENTRY_LOCATION: TEST_LOCATION, + CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, + }, + ) + + # Load config_entry. + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) is True + + # Ensure that the IPv4 location is used. 
+ mock_async_create_device.assert_called_once_with(TEST_LOCATION) diff --git a/tests/components/usb/test_init.py b/tests/components/usb/test_init.py index e7c878b6f40..a1637f62b01 100644 --- a/tests/components/usb/test_init.py +++ b/tests/components/usb/test_init.py @@ -94,9 +94,7 @@ async def test_observer_discovery( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -145,9 +143,7 @@ async def test_removal_by_observer_before_started( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch( - "pyudev.MonitorObserver", new=_create_mock_monitor_observer - ), patch.object( + ), patch("pyudev.MonitorObserver", new=_create_mock_monitor_observer), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -184,9 +180,7 @@ async def test_discovered_by_websocket_scan( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -224,9 +218,7 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( 
"homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -265,9 +257,7 @@ async def test_most_targeted_matcher_wins( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -305,9 +295,7 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -349,9 +337,7 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ 
-389,9 +375,7 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -433,9 +417,7 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -478,9 +460,7 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -517,9 +497,7 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), 
patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -554,9 +532,7 @@ async def test_discovered_by_websocket_scan_match_vid_only( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -592,9 +568,7 @@ async def test_discovered_by_websocket_scan_match_vid_wrong_pid( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -629,9 +603,7 @@ async def test_discovered_by_websocket_no_vid_pid( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -667,9 +639,7 @@ async def test_non_matching_discovered_by_scanner_after_started( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( 
"homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -708,9 +678,7 @@ async def test_observer_on_wsl_fallback_without_throwing_exception( "pyudev.Monitor.filter_by", side_effect=ValueError ), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -760,9 +728,7 @@ async def test_not_discovered_by_observer_before_started_on_docker( "homeassistant.components.usb.async_get_usb", return_value=new_usb ), patch( "homeassistant.components.usb.comports", return_value=mock_comports - ), patch( - "pyudev.MonitorObserver", new=_create_mock_monitor_observer - ): + ), patch("pyudev.MonitorObserver", new=_create_mock_monitor_observer): assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() @@ -1047,9 +1013,7 @@ async def test_resolve_serial_by_id( ), patch( "homeassistant.components.usb.get_serial_by_id", return_value="/dev/serial/by-id/bla", - ), patch.object( - hass.config_entries.flow, "async_init" - ) as mock_config_flow: + ), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow: assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) diff --git a/tests/components/vallox/test_fan.py 
b/tests/components/vallox/test_fan.py index eb60a3d025d..12b24f46aba 100644 --- a/tests/components/vallox/test_fan.py +++ b/tests/components/vallox/test_fan.py @@ -10,6 +10,7 @@ from homeassistant.components.fan import ( DOMAIN as FAN_DOMAIN, SERVICE_SET_PERCENTAGE, SERVICE_SET_PRESET_MODE, + NotValidPresetModeError, ) from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant @@ -179,7 +180,7 @@ async def test_set_invalid_preset_mode( """Test set preset mode.""" await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() - with pytest.raises(ValueError): + with pytest.raises(NotValidPresetModeError) as exc: await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, @@ -189,6 +190,7 @@ async def test_set_invalid_preset_mode( }, blocking=True, ) + assert exc.value.translation_key == "not_valid_preset_mode" async def test_set_preset_mode_exception( diff --git a/tests/components/vilfo/test_config_flow.py b/tests/components/vilfo/test_config_flow.py index 0aa59c9271f..b893d2df550 100644 --- a/tests/components/vilfo/test_config_flow.py +++ b/tests/components/vilfo/test_config_flow.py @@ -24,9 +24,7 @@ async def test_form(hass: HomeAssistant) -> None: "vilfo.Client.get_board_information", return_value=None ), patch( "vilfo.Client.resolve_firmware_version", return_value=firmware_version - ), patch( - "vilfo.Client.resolve_mac_address", return_value=mock_mac - ), patch( + ), patch("vilfo.Client.resolve_mac_address", return_value=mock_mac), patch( "homeassistant.components.vilfo.async_setup_entry" ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( @@ -117,9 +115,7 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: return_value=None, ), patch( "vilfo.Client.resolve_firmware_version", return_value=firmware_version - ), patch( - "vilfo.Client.resolve_mac_address", return_value=None - ): + ), 
patch("vilfo.Client.resolve_mac_address", return_value=None): first_flow_result2 = await hass.config_entries.flow.async_configure( first_flow_result1["flow_id"], {CONF_HOST: "testadmin.vilfo.com", CONF_ACCESS_TOKEN: "test-token"}, @@ -134,9 +130,7 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: return_value=None, ), patch( "vilfo.Client.resolve_firmware_version", return_value=firmware_version - ), patch( - "vilfo.Client.resolve_mac_address", return_value=None - ): + ), patch("vilfo.Client.resolve_mac_address", return_value=None): second_flow_result2 = await hass.config_entries.flow.async_configure( second_flow_result1["flow_id"], {CONF_HOST: "testadmin.vilfo.com", CONF_ACCESS_TOKEN: "test-token"}, @@ -177,9 +171,7 @@ async def test_validate_input_returns_data(hass: HomeAssistant) -> None: "vilfo.Client.get_board_information", return_value=None ), patch( "vilfo.Client.resolve_firmware_version", return_value=firmware_version - ), patch( - "vilfo.Client.resolve_mac_address", return_value=None - ): + ), patch("vilfo.Client.resolve_mac_address", return_value=None): result = await hass.components.vilfo.config_flow.validate_input( hass, data=mock_data ) @@ -193,9 +185,7 @@ async def test_validate_input_returns_data(hass: HomeAssistant) -> None: "vilfo.Client.get_board_information", return_value=None ), patch( "vilfo.Client.resolve_firmware_version", return_value=firmware_version - ), patch( - "vilfo.Client.resolve_mac_address", return_value=mock_mac - ): + ), patch("vilfo.Client.resolve_mac_address", return_value=mock_mac): result2 = await hass.components.vilfo.config_flow.validate_input( hass, data=mock_data ) diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index 91ea5b3e439..a94f290f7e6 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -124,7 +124,7 @@ async def test_errors( 
"homeassistant.components.vlc_telnet.config_flow.Client.login", side_effect=login_side_effect, ), patch( - "homeassistant.components.vlc_telnet.config_flow.Client.disconnect" + "homeassistant.components.vlc_telnet.config_flow.Client.disconnect", ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -219,7 +219,7 @@ async def test_reauth_errors( "homeassistant.components.vlc_telnet.config_flow.Client.login", side_effect=login_side_effect, ), patch( - "homeassistant.components.vlc_telnet.config_flow.Client.disconnect" + "homeassistant.components.vlc_telnet.config_flow.Client.disconnect", ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -316,7 +316,7 @@ async def test_hassio_errors( "homeassistant.components.vlc_telnet.config_flow.Client.login", side_effect=login_side_effect, ), patch( - "homeassistant.components.vlc_telnet.config_flow.Client.disconnect" + "homeassistant.components.vlc_telnet.config_flow.Client.disconnect", ): result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 982a14a80f4..00b1ae6e72a 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -24,7 +24,7 @@ async def test_user(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.vodafone_station.async_setup_entry" ) as mock_setup_entry, patch( - "requests.get" + "requests.get", ) as mock_request_get: mock_request_get.return_value.status_code = 200 @@ -90,7 +90,7 @@ async def test_exception_connection(hass: HomeAssistant, side_effect, error) -> ), patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.logout", ), patch( - "homeassistant.components.vodafone_station.async_setup_entry" + "homeassistant.components.vodafone_station.async_setup_entry", ): result2 = await 
hass.config_entries.flow.async_configure( result["flow_id"], @@ -122,9 +122,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: ), patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.logout", ), patch( - "homeassistant.components.vodafone_station.async_setup_entry" + "homeassistant.components.vodafone_station.async_setup_entry", ), patch( - "requests.get" + "requests.get", ) as mock_request_get: mock_request_get.return_value.status_code = 200 @@ -170,7 +170,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.logout", ), patch( - "homeassistant.components.vodafone_station.async_setup_entry" + "homeassistant.components.vodafone_station.async_setup_entry", ): result = await hass.config_entries.flow.async_init( DOMAIN, @@ -204,7 +204,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.logout", ), patch( - "homeassistant.components.vodafone_station.async_setup_entry" + "homeassistant.components.vodafone_station.async_setup_entry", ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index f82a00087c6..dbb848f3b9d 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -1,7 +1,9 @@ """Test VoIP protocol.""" import asyncio +import io import time from unittest.mock import AsyncMock, Mock, patch +import wave import pytest @@ -14,6 +16,24 @@ _ONE_SECOND = 16000 * 2 # 16Khz 16-bit _MEDIA_ID = "12345" +@pytest.fixture(autouse=True) +def mock_tts_cache_dir_autouse(mock_tts_cache_dir): + """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +def _empty_wav() -> bytes: + """Return bytes of an empty WAV file.""" + 
with io.BytesIO() as wav_io: + wav_file: wave.Wave_write = wave.open(wav_io, "wb") + with wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + + return wav_io.getvalue() + + async def test_pipeline( hass: HomeAssistant, voip_device: VoIPDevice, @@ -72,8 +92,7 @@ async def test_pipeline( media_source_id: str, ) -> tuple[str, bytes]: assert media_source_id == _MEDIA_ID - - return ("mp3", b"") + return ("wav", _empty_wav()) with patch( "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", @@ -266,7 +285,7 @@ async def test_tts_timeout( media_source_id: str, ) -> tuple[str, bytes]: # Should time out immediately - return ("raw", bytes(0)) + return ("wav", _empty_wav()) with patch( "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", @@ -305,8 +324,8 @@ async def test_tts_timeout( done.set() - rtp_protocol._async_send_audio = AsyncMock(side_effect=async_send_audio) - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) + rtp_protocol._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence rtp_protocol.on_chunk(bytes(_ONE_SECOND)) @@ -320,3 +339,264 @@ async def test_tts_timeout( # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): await done.wait() + + +async def test_tts_wrong_extension( + hass: HomeAssistant, + voip_device: VoIPDevice, +) -> None: + """Test that TTS will only stream WAV audio.""" + assert await async_setup_component(hass, "voip", {}) + + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + stt_stream = kwargs["stt_stream"] + event_callback = kwargs["event_callback"] + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass + + # Fake intent result + 
event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.INTENT_END, + data={ + "intent_output": { + "conversation_id": "fake-conversation", + } + }, + ) + ) + + # Proceed with media output + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.TTS_END, + data={"tts_output": {"media_id": _MEDIA_ID}}, + ) + ) + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + # Should fail because it's not "wav" + return ("mp3", b"") + + with patch( + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), patch( + "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ): + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() + + original_send_tts = rtp_protocol._send_tts + + async def send_tts(*args, **kwargs): + # Call original then end test successfully + with pytest.raises(ValueError): + await original_send_tts(*args, **kwargs) + + done.set() + + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + + # silence + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + + # "speech" + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + + # silence (assumes relaxed VAD sensitivity) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + + # Wait for mock pipeline to exhaust the audio stream + async with asyncio.timeout(1): + await done.wait() + + +async def test_tts_wrong_wav_format( + hass: HomeAssistant, + voip_device: VoIPDevice, +) -> None: + """Test that TTS will only stream WAV audio with a specific format.""" + assert await async_setup_component(hass, "voip", {}) + + def is_speech(self, 
chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, **kwargs): + stt_stream = kwargs["stt_stream"] + event_callback = kwargs["event_callback"] + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass + + # Fake intent result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.INTENT_END, + data={ + "intent_output": { + "conversation_id": "fake-conversation", + } + }, + ) + ) + + # Proceed with media output + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.TTS_END, + data={"tts_output": {"media_id": _MEDIA_ID}}, + ) + ) + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + # Should fail because it's not 16Khz, 16-bit mono + with io.BytesIO() as wav_io: + wav_file: wave.Wave_write = wave.open(wav_io, "wb") + with wav_file: + wav_file.setframerate(22050) + wav_file.setsampwidth(2) + wav_file.setnchannels(2) + + return ("wav", wav_io.getvalue()) + + with patch( + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), patch( + "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ): + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() + + original_send_tts = rtp_protocol._send_tts + + async def send_tts(*args, **kwargs): + # Call original then end test successfully + with pytest.raises(ValueError): + await original_send_tts(*args, **kwargs) + + done.set() + + rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + + # silence 
+ rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + + # "speech" + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + + # silence (assumes relaxed VAD sensitivity) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + + # Wait for mock pipeline to exhaust the audio stream + async with asyncio.timeout(1): + await done.wait() + + +async def test_empty_tts_output( + hass: HomeAssistant, + voip_device: VoIPDevice, +) -> None: + """Test that TTS will not stream when output is empty.""" + assert await async_setup_component(hass, "voip", {}) + + def is_speech(self, chunk): + """Anything non-zero is speech.""" + return sum(chunk) > 0 + + async def async_pipeline_from_audio_stream(*args, **kwargs): + stt_stream = kwargs["stt_stream"] + event_callback = kwargs["event_callback"] + async for _chunk in stt_stream: + # Stream will end when VAD detects end of "speech" + pass + + # Fake intent result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.INTENT_END, + data={ + "intent_output": { + "conversation_id": "fake-conversation", + } + }, + ) + ) + + # Empty TTS output + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.TTS_END, + data={"tts_output": {}}, + ) + ) + + with patch( + "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", + new=is_speech, + ), patch( + "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), patch( + "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._send_tts", + ) as mock_send_tts: + rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( + hass, + hass.config.language, + voip_device, + Context(), + opus_payload_type=123, + ) + rtp_protocol.transport = Mock() + + # silence + rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + + # "speech" + rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + + # silence (assumes relaxed VAD sensitivity) + rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + + # Wait 
for mock pipeline to finish + async with asyncio.timeout(1): + await rtp_protocol._tts_done.wait() + + mock_send_tts.assert_not_called() diff --git a/tests/components/wallbox/test_number.py b/tests/components/wallbox/test_number.py index 738b9bf7bd6..837df4dfd47 100644 --- a/tests/components/wallbox/test_number.py +++ b/tests/components/wallbox/test_number.py @@ -43,7 +43,7 @@ async def test_wallbox_number_class( status_code=200, ) state = hass.states.get(MOCK_NUMBER_ENTITY_ID) - assert state.attributes["min"] == 0 + assert state.attributes["min"] == 6 assert state.attributes["max"] == 25 await hass.services.async_call( diff --git a/tests/components/waqi/test_config_flow.py b/tests/components/waqi/test_config_flow.py index 7a95e000d82..ecc7e07158d 100644 --- a/tests/components/waqi/test_config_flow.py +++ b/tests/components/waqi/test_config_flow.py @@ -235,9 +235,9 @@ async def test_error_in_second_step( with patch( "aiowaqi.WAQIClient.authenticate", - ), patch( - "aiowaqi.WAQIClient.get_by_coordinates", side_effect=exception - ), patch("aiowaqi.WAQIClient.get_by_station_number", side_effect=exception): + ), patch("aiowaqi.WAQIClient.get_by_coordinates", side_effect=exception), patch( + "aiowaqi.WAQIClient.get_by_station_number", side_effect=exception + ): result = await hass.config_entries.flow.async_configure( result["flow_id"], payload, diff --git a/tests/components/watttime/conftest.py b/tests/components/watttime/conftest.py index f3c1986fcb0..f636ffefcfb 100644 --- a/tests/components/watttime/conftest.py +++ b/tests/components/watttime/conftest.py @@ -106,9 +106,7 @@ async def setup_watttime_fixture(hass, client, config_auth, config_coordinates): ), patch( "homeassistant.components.watttime.config_flow.Client.async_login", return_value=client, - ), patch( - "homeassistant.components.watttime.PLATFORMS", [] - ): + ), patch("homeassistant.components.watttime.PLATFORMS", []): assert await async_setup_component( hass, DOMAIN, {**config_auth, **config_coordinates} 
) diff --git a/tests/components/weather/snapshots/test_init.ambr b/tests/components/weather/snapshots/test_init.ambr index 03a2d46c80f..1aa78f6bf35 100644 --- a/tests/components/weather/snapshots/test_init.ambr +++ b/tests/components/weather/snapshots/test_init.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_get_forecast[daily-1] +# name: test_get_forecast[daily-1-get_forecast] dict({ 'forecast': list([ dict({ @@ -12,7 +12,22 @@ ]), }) # --- -# name: test_get_forecast[hourly-2] +# name: test_get_forecast[daily-1-get_forecasts] + dict({ + 'weather.testing': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': None, + 'temperature': 38.0, + 'templow': 38.0, + 'uv_index': None, + 'wind_bearing': None, + }), + ]), + }), + }) +# --- +# name: test_get_forecast[hourly-2-get_forecast] dict({ 'forecast': list([ dict({ @@ -25,7 +40,22 @@ ]), }) # --- -# name: test_get_forecast[twice_daily-4] +# name: test_get_forecast[hourly-2-get_forecasts] + dict({ + 'weather.testing': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': None, + 'temperature': 38.0, + 'templow': 38.0, + 'uv_index': None, + 'wind_bearing': None, + }), + ]), + }), + }) +# --- +# name: test_get_forecast[twice_daily-4-get_forecast] dict({ 'forecast': list([ dict({ @@ -39,3 +69,19 @@ ]), }) # --- +# name: test_get_forecast[twice_daily-4-get_forecasts] + dict({ + 'weather.testing': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': None, + 'is_daytime': True, + 'temperature': 38.0, + 'templow': 38.0, + 'uv_index': None, + 'wind_bearing': None, + }), + ]), + }), + }) +# --- diff --git a/tests/components/weather/test_init.py b/tests/components/weather/test_init.py index f62bed295da..3890d6a28d1 100644 --- a/tests/components/weather/test_init.py +++ b/tests/components/weather/test_init.py @@ -32,8 +32,9 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_SPEED, ATTR_WEATHER_WIND_SPEED_UNIT, DOMAIN, + LEGACY_SERVICE_GET_FORECAST, ROUNDING_PRECISION, - SERVICE_GET_FORECAST, + 
SERVICE_GET_FORECASTS, Forecast, WeatherEntity, WeatherEntityFeature, @@ -959,6 +960,13 @@ async def test_forecast_twice_daily_missing_is_daytime( assert msg["type"] == "result" +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @pytest.mark.parametrize( ("forecast_type", "supported_features"), [ @@ -976,6 +984,7 @@ async def test_get_forecast( forecast_type: str, supported_features: int, snapshot: SnapshotAssertion, + service: str, ) -> None: """Test get forecast service.""" @@ -1006,7 +1015,7 @@ async def test_get_forecast( response = await hass.services.async_call( DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": entity0.entity_id, "type": forecast_type, @@ -1017,9 +1026,30 @@ async def test_get_forecast( assert response == snapshot +@pytest.mark.parametrize( + ("service", "expected"), + [ + ( + SERVICE_GET_FORECASTS, + { + "weather.testing": { + "forecast": [], + } + }, + ), + ( + LEGACY_SERVICE_GET_FORECAST, + { + "forecast": [], + }, + ), + ], +) async def test_get_forecast_no_forecast( hass: HomeAssistant, config_flow_fixture: None, + service: str, + expected: dict[str, list | dict[str, list]], ) -> None: """Test get forecast service.""" @@ -1040,7 +1070,7 @@ async def test_get_forecast_no_forecast( response = await hass.services.async_call( DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": entity0.entity_id, "type": "daily", @@ -1048,11 +1078,16 @@ async def test_get_forecast_no_forecast( blocking=True, return_response=True, ) - assert response == { - "forecast": [], - } + assert response == expected +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) @pytest.mark.parametrize( ("supported_features", "forecast_types"), [ @@ -1066,6 +1101,7 @@ async def test_get_forecast_unsupported( config_flow_fixture: None, forecast_types: list[str], supported_features: int, + service: str, ) -> None: """Test get forecast service.""" @@ 
-1095,7 +1131,7 @@ async def test_get_forecast_unsupported( with pytest.raises(HomeAssistantError): await hass.services.async_call( DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": weather_entity.entity_id, "type": forecast_type, @@ -1255,3 +1291,52 @@ async def test_issue_forecast_deprecated_no_logging( "custom_components.test_weather.weather::weather.test is using a forecast attribute on an instance of WeatherEntity" not in caplog.text ) + + +async def test_issue_deprecated_service_weather_get_forecast( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + config_flow_fixture: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the issue is raised on deprecated service weather.get_forecast.""" + + class MockWeatherMock(MockWeatherTest): + """Mock weather class.""" + + async def async_forecast_daily(self) -> list[Forecast] | None: + """Return the forecast_daily.""" + return self.forecast_list + + kwargs = { + "native_temperature": 38, + "native_temperature_unit": UnitOfTemperature.CELSIUS, + "supported_features": WeatherEntityFeature.FORECAST_DAILY, + } + + entity0 = await create_entity(hass, MockWeatherMock, None, **kwargs) + + _ = await hass.services.async_call( + DOMAIN, + LEGACY_SERVICE_GET_FORECAST, + { + "entity_id": entity0.entity_id, + "type": "daily", + }, + blocking=True, + return_response=True, + ) + + issue = issue_registry.async_get_issue( + "weather", "deprecated_service_weather_get_forecast" + ) + assert issue + assert issue.issue_domain == "test" + assert issue.issue_id == "deprecated_service_weather_get_forecast" + assert issue.translation_key == "deprecated_service_weather_get_forecast" + + assert ( + "Detected use of service 'weather.get_forecast'. " + "This is deprecated and will stop working in Home Assistant 2024.6. 
" + "Use 'weather.get_forecasts' instead which supports multiple entities" + ) in caplog.text diff --git a/tests/components/weather/test_intent.py b/tests/components/weather/test_intent.py new file mode 100644 index 00000000000..1a171da7fae --- /dev/null +++ b/tests/components/weather/test_intent.py @@ -0,0 +1,108 @@ +"""Test weather intents.""" +from unittest.mock import patch + +import pytest + +from homeassistant.components.weather import ( + DOMAIN, + WeatherEntity, + intent as weather_intent, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component + + +async def test_get_weather(hass: HomeAssistant) -> None: + """Test get weather for first entity and by name.""" + assert await async_setup_component(hass, "weather", {"weather": {}}) + + entity1 = WeatherEntity() + entity1._attr_name = "Weather 1" + entity1.entity_id = "weather.test_1" + + entity2 = WeatherEntity() + entity2._attr_name = "Weather 2" + entity2.entity_id = "weather.test_2" + + await hass.data[DOMAIN].async_add_entities([entity1, entity2]) + + await weather_intent.async_setup_intents(hass) + + # First entity will be chosen + response = await intent.async_handle( + hass, "test", weather_intent.INTENT_GET_WEATHER, {} + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + state = response.matched_states[0] + assert state.entity_id == entity1.entity_id + + # Named entity will be chosen + response = await intent.async_handle( + hass, + "test", + weather_intent.INTENT_GET_WEATHER, + {"name": {"value": "Weather 2"}}, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + state = response.matched_states[0] + assert state.entity_id == entity2.entity_id + + +async def test_get_weather_wrong_name(hass: HomeAssistant) -> None: + """Test get weather with the wrong name.""" + assert await 
async_setup_component(hass, "weather", {"weather": {}}) + + entity1 = WeatherEntity() + entity1._attr_name = "Weather 1" + entity1.entity_id = "weather.test_1" + + await hass.data[DOMAIN].async_add_entities([entity1]) + + await weather_intent.async_setup_intents(hass) + + # Incorrect name + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + weather_intent.INTENT_GET_WEATHER, + {"name": {"value": "not the right name"}}, + ) + + +async def test_get_weather_no_entities(hass: HomeAssistant) -> None: + """Test get weather with no weather entities.""" + assert await async_setup_component(hass, "weather", {"weather": {}}) + await weather_intent.async_setup_intents(hass) + + # No weather entities + with pytest.raises(intent.IntentHandleError): + await intent.async_handle(hass, "test", weather_intent.INTENT_GET_WEATHER, {}) + + +async def test_get_weather_no_state(hass: HomeAssistant) -> None: + """Test get weather when state is not returned.""" + assert await async_setup_component(hass, "weather", {"weather": {}}) + + entity1 = WeatherEntity() + entity1._attr_name = "Weather 1" + entity1.entity_id = "weather.test_1" + + await hass.data[DOMAIN].async_add_entities([entity1]) + + await weather_intent.async_setup_intents(hass) + + # Success with state + response = await intent.async_handle( + hass, "test", weather_intent.INTENT_GET_WEATHER, {} + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + + # Failure without state + with patch("homeassistant.core.StateMachine.get", return_value=None), pytest.raises( + intent.IntentHandleError + ): + await intent.async_handle(hass, "test", weather_intent.INTENT_GET_WEATHER, {}) diff --git a/tests/components/weatherkit/snapshots/test_weather.ambr b/tests/components/weatherkit/snapshots/test_weather.ambr index 63321b5a813..1fbe5389e98 100644 --- a/tests/components/weatherkit/snapshots/test_weather.ambr +++ b/tests/components/weatherkit/snapshots/test_weather.ambr @@ 
-95,6 +95,298 @@ ]), }) # --- +# name: test_daily_forecast[forecast] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 
'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }), + }) +# --- +# name: test_daily_forecast[get_forecast] + dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + 
}), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }) +# --- +# name: test_daily_forecast[get_forecasts] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 28.6, + 'templow': 21.2, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-09T15:00:00Z', + 'precipitation': 3.6, + 'precipitation_probability': 45.0, + 'temperature': 30.6, + 'templow': 21.0, + 'uv_index': 6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-11T15:00:00Z', + 'precipitation': 0.7, + 'precipitation_probability': 47.0, + 'temperature': 30.4, + 'templow': 23.1, + 'uv_index': 5, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-12T15:00:00Z', + 'precipitation': 7.7, + 'precipitation_probability': 37.0, + 'temperature': 30.4, + 'templow': 22.1, + 'uv_index': 6, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2023-09-13T15:00:00Z', + 'precipitation': 0.6, + 'precipitation_probability': 45.0, + 'temperature': 31.0, + 'templow': 22.6, + 'uv_index': 6, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'temperature': 31.5, + 'templow': 22.4, + 'uv_index': 7, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2023-09-15T15:00:00Z', + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'temperature': 31.8, + 'templow': 23.3, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-16T15:00:00Z', + 'precipitation': 
5.3, + 'precipitation_probability': 35.0, + 'temperature': 30.7, + 'templow': 23.2, + 'uv_index': 8, + }), + dict({ + 'condition': 'lightning', + 'datetime': '2023-09-17T15:00:00Z', + 'precipitation': 2.1, + 'precipitation_probability': 49.0, + 'temperature': 28.1, + 'templow': 22.5, + 'uv_index': 6, + }), + ]), + }), + }) +# --- # name: test_hourly_forecast dict({ 'forecast': list([ @@ -4085,3 +4377,11977 @@ ]), }) # --- +# name: test_hourly_forecast[forecast] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, 
+ }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, 
+ 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, + }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 
'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': 
'2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 
'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 
4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', + 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': 
'2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 
'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), 
+ dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 
21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 
'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, + 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 
'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 
1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 23.35, + 'wind_speed': 11.98, + }), + dict({ + 
'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': 
'2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 
'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 
17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 
1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 
'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, + 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 
'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 
'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 
'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 
'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 
24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 
24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 
22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, 
+ 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 
'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 
'dew_point': 22.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 
'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 
33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 
'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 
72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, + 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 
'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 16.49, + 'wind_speed': 7.34, + }), + dict({ + 
'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': 
'2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 
'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 
11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 28.0, + 'uv_index': 5, + 'wind_bearing': 335, + 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 
'condition': 'partlycloudy', + 'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 
'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, + 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 
162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }), + }) +# --- +# name: test_hourly_forecast[get_forecast] + dict({ + 
'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 
0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, + 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, + }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + 
}), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 
'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 
1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + 
dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', + 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 
'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ 
+ 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, 
+ 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 
'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 
'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, + 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 
21.8, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 
'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 
'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 23.35, + 'wind_speed': 11.98, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 
'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 
'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 
'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, 
+ 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 
'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, + 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 
16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 
'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 
'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 
'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 
'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 
'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 
201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 
'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 
14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 
'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, + 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 
174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 
50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 16.49, + 'wind_speed': 7.34, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 
21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 
'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + 
}), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 
'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 28.0, + 'uv_index': 5, + 'wind_bearing': 335, + 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 
'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, 
+ 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', 
+ 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }) +# --- +# name: test_hourly_forecast[get_forecasts] + dict({ + 'weather.home': dict({ + 'forecast': list([ + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T14:00:00Z', + 'dew_point': 21.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 264, + 'wind_gust_speed': 13.44, + 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 80.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-08T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.24, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 261, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.64, + }), + dict({ + 'apparent_temperature': 23.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.12, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 252, + 'wind_gust_speed': 11.15, + 'wind_speed': 6.14, + }), + dict({ + 'apparent_temperature': 23.5, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.7, + 'uv_index': 0, + 'wind_bearing': 248, + 'wind_gust_speed': 11.57, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 23.3, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T18:00:00Z', + 'dew_point': 20.8, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.05, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 12.42, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 23.0, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.03, + 'temperature': 21.3, + 'uv_index': 0, + 'wind_bearing': 224, + 'wind_gust_speed': 11.3, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T20:00:00Z', + 'dew_point': 20.4, + 'humidity': 96, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.31, 
+ 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 221, + 'wind_gust_speed': 10.57, + 'wind_speed': 5.13, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T21:00:00Z', + 'dew_point': 20.5, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.55, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 237, + 'wind_gust_speed': 10.63, + 'wind_speed': 5.7, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-08T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.79, + 'temperature': 22.8, + 'uv_index': 1, + 'wind_bearing': 258, + 'wind_gust_speed': 10.47, + 'wind_speed': 5.22, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-08T23:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.95, + 'temperature': 24.0, + 'uv_index': 2, + 'wind_bearing': 282, + 'wind_gust_speed': 12.74, + 'wind_speed': 5.71, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T00:00:00Z', + 'dew_point': 21.5, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.35, + 'temperature': 25.1, + 'uv_index': 3, + 'wind_bearing': 294, + 'wind_gust_speed': 13.87, + 'wind_speed': 6.53, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T01:00:00Z', + 'dew_point': 21.8, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 26.5, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 16.04, + 'wind_speed': 6.54, + }), + 
dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T02:00:00Z', + 'dew_point': 22.0, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.23, + 'temperature': 27.6, + 'uv_index': 6, + 'wind_bearing': 314, + 'wind_gust_speed': 18.1, + 'wind_speed': 7.32, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T03:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.86, + 'temperature': 28.3, + 'uv_index': 6, + 'wind_bearing': 317, + 'wind_gust_speed': 20.77, + 'wind_speed': 9.1, + }), + dict({ + 'apparent_temperature': 31.5, + 'cloud_coverage': 69.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T04:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.65, + 'temperature': 28.6, + 'uv_index': 6, + 'wind_bearing': 311, + 'wind_gust_speed': 21.27, + 'wind_speed': 10.21, + }), + dict({ + 'apparent_temperature': 31.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T05:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.48, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 317, + 'wind_gust_speed': 19.62, + 'wind_speed': 10.53, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.54, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 335, + 'wind_gust_speed': 18.98, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T07:00:00Z', + 'dew_point': 
22.2, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.76, + 'temperature': 27.1, + 'uv_index': 2, + 'wind_bearing': 338, + 'wind_gust_speed': 17.04, + 'wind_speed': 7.75, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.05, + 'temperature': 26.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 14.75, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 344, + 'wind_gust_speed': 10.43, + 'wind_speed': 5.2, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.73, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 6.95, + 'wind_speed': 3.59, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 326, + 'wind_gust_speed': 5.27, + 'wind_speed': 2.1, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.52, + 'temperature': 23.9, + 
'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 5.48, + 'wind_speed': 0.93, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T13:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 188, + 'wind_gust_speed': 4.44, + 'wind_speed': 1.79, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 4.49, + 'wind_speed': 2.19, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T15:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.21, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 179, + 'wind_gust_speed': 5.32, + 'wind_speed': 2.65, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T16:00:00Z', + 'dew_point': 21.1, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 22.1, + 'uv_index': 0, + 'wind_bearing': 173, + 'wind_gust_speed': 5.81, + 'wind_speed': 3.2, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T17:00:00Z', + 'dew_point': 20.9, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.88, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 5.53, + 'wind_speed': 3.16, + }), + dict({ + 
'apparent_temperature': 23.3, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.94, + 'temperature': 21.6, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 6.09, + 'wind_speed': 3.36, + }), + dict({ + 'apparent_temperature': 23.1, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T19:00:00Z', + 'dew_point': 20.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.96, + 'temperature': 21.4, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 6.83, + 'wind_speed': 3.71, + }), + dict({ + 'apparent_temperature': 22.5, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T20:00:00Z', + 'dew_point': 20.0, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 21.0, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 7.98, + 'wind_speed': 4.27, + }), + dict({ + 'apparent_temperature': 22.8, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T21:00:00Z', + 'dew_point': 20.2, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.61, + 'temperature': 21.2, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 8.4, + 'wind_speed': 4.69, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-09T22:00:00Z', + 'dew_point': 21.3, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.87, + 'temperature': 23.1, + 'uv_index': 1, + 'wind_bearing': 150, + 'wind_gust_speed': 7.66, + 'wind_speed': 4.33, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-09T23:00:00Z', 
+ 'dew_point': 22.3, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 123, + 'wind_gust_speed': 9.63, + 'wind_speed': 3.91, + }), + dict({ + 'apparent_temperature': 30.4, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 105, + 'wind_gust_speed': 12.59, + 'wind_speed': 3.96, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T01:00:00Z', + 'dew_point': 22.9, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.79, + 'temperature': 28.9, + 'uv_index': 5, + 'wind_bearing': 99, + 'wind_gust_speed': 14.17, + 'wind_speed': 4.06, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T02:00:00Z', + 'dew_point': 22.9, + 'humidity': 66, + 'precipitation': 0.3, + 'precipitation_probability': 7.000000000000001, + 'pressure': 1011.29, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 93, + 'wind_gust_speed': 17.75, + 'wind_speed': 4.87, + }), + dict({ + 'apparent_temperature': 34.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T03:00:00Z', + 'dew_point': 23.1, + 'humidity': 64, + 'precipitation': 0.3, + 'precipitation_probability': 11.0, + 'pressure': 1010.78, + 'temperature': 30.6, + 'uv_index': 6, + 'wind_bearing': 78, + 'wind_gust_speed': 17.43, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T04:00:00Z', + 'dew_point': 23.2, + 'humidity': 66, + 'precipitation': 0.4, + 'precipitation_probability': 15.0, + 'pressure': 1010.37, + 
'temperature': 30.3, + 'uv_index': 5, + 'wind_bearing': 60, + 'wind_gust_speed': 15.24, + 'wind_speed': 4.9, + }), + dict({ + 'apparent_temperature': 33.7, + 'cloud_coverage': 79.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T05:00:00Z', + 'dew_point': 23.3, + 'humidity': 67, + 'precipitation': 0.7, + 'precipitation_probability': 17.0, + 'pressure': 1010.09, + 'temperature': 30.0, + 'uv_index': 4, + 'wind_bearing': 80, + 'wind_gust_speed': 13.53, + 'wind_speed': 5.98, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T06:00:00Z', + 'dew_point': 23.4, + 'humidity': 70, + 'precipitation': 1.0, + 'precipitation_probability': 17.0, + 'pressure': 1010.0, + 'temperature': 29.5, + 'uv_index': 3, + 'wind_bearing': 83, + 'wind_gust_speed': 12.55, + 'wind_speed': 6.84, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 88.0, + 'condition': 'rainy', + 'datetime': '2023-09-10T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 73, + 'precipitation': 0.4, + 'precipitation_probability': 16.0, + 'pressure': 1010.27, + 'temperature': 28.7, + 'uv_index': 2, + 'wind_bearing': 90, + 'wind_gust_speed': 10.16, + 'wind_speed': 6.07, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T08:00:00Z', + 'dew_point': 23.2, + 'humidity': 77, + 'precipitation': 0.5, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.71, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 101, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.82, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 93.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T09:00:00Z', + 'dew_point': 23.2, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.9, + 'temperature': 26.5, + 'uv_index': 0, + 'wind_bearing': 128, + 'wind_gust_speed': 8.89, + 'wind_speed': 4.95, + }), + dict({ + 
'apparent_temperature': 28.6, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T10:00:00Z', + 'dew_point': 23.0, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.12, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 134, + 'wind_gust_speed': 10.03, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.43, + 'temperature': 25.1, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 12.4, + 'wind_speed': 5.41, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T12:00:00Z', + 'dew_point': 22.5, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.58, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 16.36, + 'wind_speed': 6.31, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T13:00:00Z', + 'dew_point': 22.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 19.66, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.4, + 'temperature': 24.3, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 21.15, + 'wind_speed': 7.46, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T15:00:00Z', + 'dew_point': 22.0, + 
'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.26, + 'wind_speed': 7.84, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.01, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 23.53, + 'wind_speed': 8.63, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-10T17:00:00Z', + 'dew_point': 21.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.78, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 22.83, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T18:00:00Z', + 'dew_point': 21.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.69, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.7, + 'wind_speed': 8.7, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T19:00:00Z', + 'dew_point': 21.4, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.77, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 24.24, + 'wind_speed': 8.74, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.89, + 'temperature': 23.3, + 'uv_index': 0, + 
'wind_bearing': 138, + 'wind_gust_speed': 23.99, + 'wind_speed': 8.81, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T21:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.1, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 25.55, + 'wind_speed': 9.05, + }), + dict({ + 'apparent_temperature': 27.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 24.6, + 'uv_index': 1, + 'wind_bearing': 140, + 'wind_gust_speed': 29.08, + 'wind_speed': 10.37, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-10T23:00:00Z', + 'dew_point': 21.9, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.36, + 'temperature': 25.9, + 'uv_index': 2, + 'wind_bearing': 140, + 'wind_gust_speed': 34.13, + 'wind_speed': 12.56, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T00:00:00Z', + 'dew_point': 22.3, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 27.2, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 38.2, + 'wind_speed': 15.65, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T01:00:00Z', + 'dew_point': 22.3, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 37.55, + 'wind_speed': 15.78, + }), + dict({ + 'apparent_temperature': 32.7, + 
'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 143, + 'wind_gust_speed': 35.86, + 'wind_speed': 15.41, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T03:00:00Z', + 'dew_point': 22.5, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.61, + 'temperature': 30.3, + 'uv_index': 6, + 'wind_bearing': 141, + 'wind_gust_speed': 35.88, + 'wind_speed': 15.51, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T04:00:00Z', + 'dew_point': 22.6, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.36, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 140, + 'wind_gust_speed': 35.99, + 'wind_speed': 15.75, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T05:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.11, + 'temperature': 30.1, + 'uv_index': 4, + 'wind_bearing': 137, + 'wind_gust_speed': 33.61, + 'wind_speed': 15.36, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T06:00:00Z', + 'dew_point': 22.5, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1009.98, + 'temperature': 30.0, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 32.61, + 'wind_speed': 14.98, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T07:00:00Z', + 'dew_point': 22.2, + 'humidity': 66, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.13, + 'temperature': 29.2, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 28.1, + 'wind_speed': 13.88, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T08:00:00Z', + 'dew_point': 22.1, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.48, + 'temperature': 28.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 24.22, + 'wind_speed': 13.02, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-11T09:00:00Z', + 'dew_point': 21.9, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.81, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 22.5, + 'wind_speed': 11.94, + }), + dict({ + 'apparent_temperature': 28.8, + 'cloud_coverage': 63.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T10:00:00Z', + 'dew_point': 21.7, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 137, + 'wind_gust_speed': 21.47, + 'wind_speed': 11.25, + }), + dict({ + 'apparent_temperature': 28.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T11:00:00Z', + 'dew_point': 21.8, + 'humidity': 80, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 22.71, + 'wind_speed': 12.39, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 25.2, 
+ 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 23.67, + 'wind_speed': 12.83, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T13:00:00Z', + 'dew_point': 21.7, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.97, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 23.34, + 'wind_speed': 12.62, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T14:00:00Z', + 'dew_point': 21.7, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.83, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.9, + 'wind_speed': 12.07, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T15:00:00Z', + 'dew_point': 21.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.74, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 22.01, + 'wind_speed': 11.19, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T16:00:00Z', + 'dew_point': 21.6, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.56, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 21.29, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T17:00:00Z', + 'dew_point': 21.5, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.35, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 20.52, + 'wind_speed': 10.5, + }), + dict({ + 'apparent_temperature': 25.7, + 
'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.3, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 20.04, + 'wind_speed': 10.51, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T19:00:00Z', + 'dew_point': 21.3, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 12.0, + 'pressure': 1011.37, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 18.07, + 'wind_speed': 10.13, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T20:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.2, + 'precipitation_probability': 13.0, + 'pressure': 1011.53, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 16.86, + 'wind_speed': 10.34, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T21:00:00Z', + 'dew_point': 21.4, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.71, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 138, + 'wind_gust_speed': 16.66, + 'wind_speed': 10.68, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T22:00:00Z', + 'dew_point': 21.9, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 24.4, + 'uv_index': 1, + 'wind_bearing': 137, + 'wind_gust_speed': 17.21, + 'wind_speed': 10.61, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 78.0, + 'condition': 'cloudy', + 'datetime': '2023-09-11T23:00:00Z', + 'dew_point': 22.3, + 'humidity': 82, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.05, + 'temperature': 25.6, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 19.23, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 29.5, + 'cloud_coverage': 79.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T00:00:00Z', + 'dew_point': 22.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.07, + 'temperature': 26.6, + 'uv_index': 3, + 'wind_bearing': 140, + 'wind_gust_speed': 20.61, + 'wind_speed': 11.13, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 82.0, + 'condition': 'rainy', + 'datetime': '2023-09-12T01:00:00Z', + 'dew_point': 23.1, + 'humidity': 75, + 'precipitation': 0.2, + 'precipitation_probability': 16.0, + 'pressure': 1011.89, + 'temperature': 27.9, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 23.35, + 'wind_speed': 11.98, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 85.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.53, + 'temperature': 29.0, + 'uv_index': 5, + 'wind_bearing': 143, + 'wind_gust_speed': 26.45, + 'wind_speed': 13.01, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.15, + 'temperature': 29.8, + 'uv_index': 5, + 'wind_bearing': 141, + 'wind_gust_speed': 28.95, + 'wind_speed': 13.9, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.79, + 'temperature': 30.2, + 'uv_index': 5, + 'wind_bearing': 
141, + 'wind_gust_speed': 27.9, + 'wind_speed': 13.95, + }), + dict({ + 'apparent_temperature': 34.0, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T05:00:00Z', + 'dew_point': 23.1, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.43, + 'temperature': 30.4, + 'uv_index': 4, + 'wind_bearing': 140, + 'wind_gust_speed': 26.53, + 'wind_speed': 13.78, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T06:00:00Z', + 'dew_point': 22.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.21, + 'temperature': 30.1, + 'uv_index': 3, + 'wind_bearing': 138, + 'wind_gust_speed': 24.56, + 'wind_speed': 13.74, + }), + dict({ + 'apparent_temperature': 32.0, + 'cloud_coverage': 53.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 66, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.26, + 'temperature': 29.1, + 'uv_index': 2, + 'wind_bearing': 138, + 'wind_gust_speed': 22.78, + 'wind_speed': 13.21, + }), + dict({ + 'apparent_temperature': 30.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.51, + 'temperature': 28.1, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 19.92, + 'wind_speed': 12.0, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T09:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.8, + 'temperature': 27.2, + 'uv_index': 0, + 'wind_bearing': 141, + 'wind_gust_speed': 17.65, + 'wind_speed': 10.97, + }), + dict({ + 'apparent_temperature': 28.6, + 
'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T10:00:00Z', + 'dew_point': 21.4, + 'humidity': 75, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.23, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 143, + 'wind_gust_speed': 15.87, + 'wind_speed': 10.23, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T11:00:00Z', + 'dew_point': 21.3, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1011.79, + 'temperature': 25.4, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 13.9, + 'wind_speed': 9.39, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-12T12:00:00Z', + 'dew_point': 21.2, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 47.0, + 'pressure': 1012.12, + 'temperature': 24.7, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.32, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1012.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.18, + 'wind_speed': 8.59, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T14:00:00Z', + 'dew_point': 21.3, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.09, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 13.84, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T15:00:00Z', + 'dew_point': 21.3, + 
'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.99, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.93, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T16:00:00Z', + 'dew_point': 21.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 16.74, + 'wind_speed': 9.49, + }), + dict({ + 'apparent_temperature': 24.7, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T17:00:00Z', + 'dew_point': 20.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.75, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 146, + 'wind_gust_speed': 17.45, + 'wind_speed': 9.12, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T18:00:00Z', + 'dew_point': 20.7, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.77, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.04, + 'wind_speed': 8.68, + }), + dict({ + 'apparent_temperature': 24.1, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T19:00:00Z', + 'dew_point': 20.6, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.93, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 16.8, + 'wind_speed': 8.61, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T20:00:00Z', + 'dew_point': 20.5, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.23, + 'temperature': 22.1, + 'uv_index': 0, + 
'wind_bearing': 150, + 'wind_gust_speed': 15.35, + 'wind_speed': 8.36, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 75.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T21:00:00Z', + 'dew_point': 20.6, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.49, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 14.09, + 'wind_speed': 7.77, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T22:00:00Z', + 'dew_point': 21.0, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.72, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 152, + 'wind_gust_speed': 14.04, + 'wind_speed': 7.25, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-12T23:00:00Z', + 'dew_point': 21.4, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 25.5, + 'uv_index': 2, + 'wind_bearing': 149, + 'wind_gust_speed': 15.31, + 'wind_speed': 7.14, + }), + dict({ + 'apparent_temperature': 29.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-13T00:00:00Z', + 'dew_point': 21.8, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 27.1, + 'uv_index': 4, + 'wind_bearing': 141, + 'wind_gust_speed': 16.42, + 'wind_speed': 6.89, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T01:00:00Z', + 'dew_point': 22.0, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.65, + 'temperature': 28.4, + 'uv_index': 5, + 'wind_bearing': 137, + 'wind_gust_speed': 18.64, + 'wind_speed': 6.65, + }), + dict({ + 'apparent_temperature': 32.3, + 'cloud_coverage': 
73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T02:00:00Z', + 'dew_point': 21.9, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.26, + 'temperature': 29.4, + 'uv_index': 5, + 'wind_bearing': 128, + 'wind_gust_speed': 21.69, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 33.0, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T03:00:00Z', + 'dew_point': 21.9, + 'humidity': 62, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.88, + 'temperature': 30.1, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 23.41, + 'wind_speed': 7.33, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T04:00:00Z', + 'dew_point': 22.0, + 'humidity': 61, + 'precipitation': 0.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.55, + 'temperature': 30.4, + 'uv_index': 5, + 'wind_bearing': 56, + 'wind_gust_speed': 23.1, + 'wind_speed': 8.09, + }), + dict({ + 'apparent_temperature': 33.2, + 'cloud_coverage': 72.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 61, + 'precipitation': 1.9, + 'precipitation_probability': 12.0, + 'pressure': 1011.29, + 'temperature': 30.2, + 'uv_index': 4, + 'wind_bearing': 20, + 'wind_gust_speed': 21.81, + 'wind_speed': 9.46, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 74.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T06:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 2.3, + 'precipitation_probability': 11.0, + 'pressure': 1011.17, + 'temperature': 29.7, + 'uv_index': 3, + 'wind_bearing': 20, + 'wind_gust_speed': 19.72, + 'wind_speed': 9.8, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 69.0, + 'condition': 'rainy', + 'datetime': '2023-09-13T07:00:00Z', + 'dew_point': 22.4, + 'humidity': 68, + 'precipitation': 1.8, + 
'precipitation_probability': 10.0, + 'pressure': 1011.32, + 'temperature': 28.8, + 'uv_index': 1, + 'wind_bearing': 18, + 'wind_gust_speed': 17.55, + 'wind_speed': 9.23, + }), + dict({ + 'apparent_temperature': 30.8, + 'cloud_coverage': 73.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T08:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.8, + 'precipitation_probability': 10.0, + 'pressure': 1011.6, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 27, + 'wind_gust_speed': 15.08, + 'wind_speed': 8.05, + }), + dict({ + 'apparent_temperature': 29.4, + 'cloud_coverage': 76.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T09:00:00Z', + 'dew_point': 23.0, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.94, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 32, + 'wind_gust_speed': 12.17, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T10:00:00Z', + 'dew_point': 22.9, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.3, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 69, + 'wind_gust_speed': 11.64, + 'wind_speed': 6.69, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.71, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 11.91, + 'wind_speed': 6.23, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.96, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.47, 
+ 'wind_speed': 5.73, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 82.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T13:00:00Z', + 'dew_point': 22.3, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.03, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 13.57, + 'wind_speed': 5.66, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 84.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T14:00:00Z', + 'dew_point': 22.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.99, + 'temperature': 23.9, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 15.07, + 'wind_speed': 5.83, + }), + dict({ + 'apparent_temperature': 26.1, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T15:00:00Z', + 'dew_point': 22.2, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.95, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 16.06, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 88.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T16:00:00Z', + 'dew_point': 22.0, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.9, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 153, + 'wind_gust_speed': 16.05, + 'wind_speed': 5.75, + }), + dict({ + 'apparent_temperature': 25.4, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T17:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.85, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 150, + 'wind_gust_speed': 15.52, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 92.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-13T18:00:00Z', + 'dew_point': 21.8, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.87, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 15.01, + 'wind_speed': 5.32, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 90.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 22.8, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.39, + 'wind_speed': 5.33, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 89.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T20:00:00Z', + 'dew_point': 21.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.22, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 13.79, + 'wind_speed': 5.43, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 86.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 147, + 'wind_gust_speed': 14.12, + 'wind_speed': 5.52, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 77.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T22:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.59, + 'temperature': 24.3, + 'uv_index': 1, + 'wind_bearing': 147, + 'wind_gust_speed': 16.14, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-13T23:00:00Z', + 'dew_point': 22.4, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 
1013.74, + 'temperature': 25.7, + 'uv_index': 2, + 'wind_bearing': 146, + 'wind_gust_speed': 19.09, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.78, + 'temperature': 27.4, + 'uv_index': 4, + 'wind_bearing': 143, + 'wind_gust_speed': 21.6, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 32.2, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T01:00:00Z', + 'dew_point': 23.2, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.61, + 'temperature': 28.7, + 'uv_index': 5, + 'wind_bearing': 138, + 'wind_gust_speed': 23.36, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 54.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T02:00:00Z', + 'dew_point': 23.2, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.32, + 'temperature': 29.9, + 'uv_index': 6, + 'wind_bearing': 111, + 'wind_gust_speed': 24.72, + 'wind_speed': 4.99, + }), + dict({ + 'apparent_temperature': 34.4, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T03:00:00Z', + 'dew_point': 23.3, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.04, + 'temperature': 30.7, + 'uv_index': 6, + 'wind_bearing': 354, + 'wind_gust_speed': 25.23, + 'wind_speed': 4.74, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T04:00:00Z', + 'dew_point': 23.4, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.77, + 'temperature': 31.0, + 'uv_index': 6, + 'wind_bearing': 341, + 
'wind_gust_speed': 24.6, + 'wind_speed': 4.79, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 60.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T05:00:00Z', + 'dew_point': 23.2, + 'humidity': 64, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1012.53, + 'temperature': 30.7, + 'uv_index': 5, + 'wind_bearing': 336, + 'wind_gust_speed': 23.28, + 'wind_speed': 5.07, + }), + dict({ + 'apparent_temperature': 33.8, + 'cloud_coverage': 59.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T06:00:00Z', + 'dew_point': 23.1, + 'humidity': 66, + 'precipitation': 0.2, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1012.49, + 'temperature': 30.2, + 'uv_index': 3, + 'wind_bearing': 336, + 'wind_gust_speed': 22.05, + 'wind_speed': 5.34, + }), + dict({ + 'apparent_temperature': 32.9, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-14T07:00:00Z', + 'dew_point': 23.0, + 'humidity': 68, + 'precipitation': 0.2, + 'precipitation_probability': 40.0, + 'pressure': 1012.73, + 'temperature': 29.5, + 'uv_index': 2, + 'wind_bearing': 339, + 'wind_gust_speed': 21.18, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 31.6, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T08:00:00Z', + 'dew_point': 22.8, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 45.0, + 'pressure': 1013.16, + 'temperature': 28.4, + 'uv_index': 0, + 'wind_bearing': 342, + 'wind_gust_speed': 20.35, + 'wind_speed': 5.93, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T09:00:00Z', + 'dew_point': 22.5, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1013.62, + 'temperature': 27.1, + 'uv_index': 0, + 'wind_bearing': 347, + 'wind_gust_speed': 19.42, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 29.0, + 'cloud_coverage': 32.0, + 
'condition': 'sunny', + 'datetime': '2023-09-14T10:00:00Z', + 'dew_point': 22.4, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.09, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 348, + 'wind_gust_speed': 18.19, + 'wind_speed': 5.31, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T11:00:00Z', + 'dew_point': 22.4, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.56, + 'temperature': 25.5, + 'uv_index': 0, + 'wind_bearing': 177, + 'wind_gust_speed': 16.79, + 'wind_speed': 4.28, + }), + dict({ + 'apparent_temperature': 27.5, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T12:00:00Z', + 'dew_point': 22.3, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.87, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 15.61, + 'wind_speed': 3.72, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T13:00:00Z', + 'dew_point': 22.1, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.91, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 14.7, + 'wind_speed': 4.11, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T14:00:00Z', + 'dew_point': 21.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.8, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 171, + 'wind_gust_speed': 13.81, + 'wind_speed': 4.97, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T15:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 
'precipitation_probability': 40.0, + 'pressure': 1014.66, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 170, + 'wind_gust_speed': 12.88, + 'wind_speed': 5.57, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T16:00:00Z', + 'dew_point': 21.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.54, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 12.0, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 24.4, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T17:00:00Z', + 'dew_point': 21.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 40.0, + 'pressure': 1014.45, + 'temperature': 22.4, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 11.43, + 'wind_speed': 5.48, + }), + dict({ + 'apparent_temperature': 24.6, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T18:00:00Z', + 'dew_point': 21.4, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 44.0, + 'pressure': 1014.45, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 11.42, + 'wind_speed': 5.38, + }), + dict({ + 'apparent_temperature': 25.0, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T19:00:00Z', + 'dew_point': 21.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 52.0, + 'pressure': 1014.63, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 12.15, + 'wind_speed': 5.39, + }), + dict({ + 'apparent_temperature': 25.6, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-14T20:00:00Z', + 'dew_point': 21.8, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 51.0, + 'pressure': 1014.91, + 'temperature': 23.4, + 'uv_index': 0, + 'wind_bearing': 
159, + 'wind_gust_speed': 13.54, + 'wind_speed': 5.45, + }), + dict({ + 'apparent_temperature': 26.6, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T21:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 42.0, + 'pressure': 1015.18, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 15.48, + 'wind_speed': 5.62, + }), + dict({ + 'apparent_temperature': 28.5, + 'cloud_coverage': 32.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T22:00:00Z', + 'dew_point': 22.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 28.999999999999996, + 'pressure': 1015.4, + 'temperature': 25.7, + 'uv_index': 1, + 'wind_bearing': 158, + 'wind_gust_speed': 17.86, + 'wind_speed': 5.84, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-14T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 77, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.54, + 'temperature': 27.2, + 'uv_index': 2, + 'wind_bearing': 155, + 'wind_gust_speed': 20.19, + 'wind_speed': 6.09, + }), + dict({ + 'apparent_temperature': 32.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T00:00:00Z', + 'dew_point': 23.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.55, + 'temperature': 28.6, + 'uv_index': 4, + 'wind_bearing': 152, + 'wind_gust_speed': 21.83, + 'wind_speed': 6.42, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-15T01:00:00Z', + 'dew_point': 23.5, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.35, + 'temperature': 29.6, + 'uv_index': 6, + 'wind_bearing': 144, + 'wind_gust_speed': 22.56, + 'wind_speed': 6.91, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 41.0, + 
'condition': 'partlycloudy', + 'datetime': '2023-09-15T02:00:00Z', + 'dew_point': 23.5, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.0, + 'temperature': 30.4, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.83, + 'wind_speed': 7.47, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T03:00:00Z', + 'dew_point': 23.5, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.62, + 'temperature': 30.9, + 'uv_index': 7, + 'wind_bearing': 336, + 'wind_gust_speed': 22.98, + 'wind_speed': 7.95, + }), + dict({ + 'apparent_temperature': 35.4, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T04:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 31.3, + 'uv_index': 6, + 'wind_bearing': 341, + 'wind_gust_speed': 23.21, + 'wind_speed': 8.44, + }), + dict({ + 'apparent_temperature': 35.6, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T05:00:00Z', + 'dew_point': 23.7, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.95, + 'temperature': 31.5, + 'uv_index': 5, + 'wind_bearing': 344, + 'wind_gust_speed': 23.46, + 'wind_speed': 8.95, + }), + dict({ + 'apparent_temperature': 35.1, + 'cloud_coverage': 42.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T06:00:00Z', + 'dew_point': 23.6, + 'humidity': 64, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.83, + 'temperature': 31.1, + 'uv_index': 3, + 'wind_bearing': 347, + 'wind_gust_speed': 23.64, + 'wind_speed': 9.13, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T07:00:00Z', + 'dew_point': 23.4, + 'humidity': 66, + 
'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.96, + 'temperature': 30.3, + 'uv_index': 2, + 'wind_bearing': 350, + 'wind_gust_speed': 23.66, + 'wind_speed': 8.78, + }), + dict({ + 'apparent_temperature': 32.4, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T08:00:00Z', + 'dew_point': 23.1, + 'humidity': 70, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.25, + 'temperature': 29.0, + 'uv_index': 0, + 'wind_bearing': 356, + 'wind_gust_speed': 23.51, + 'wind_speed': 8.13, + }), + dict({ + 'apparent_temperature': 31.1, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T09:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.61, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 3, + 'wind_gust_speed': 23.21, + 'wind_speed': 7.48, + }), + dict({ + 'apparent_temperature': 30.0, + 'cloud_coverage': 43.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T10:00:00Z', + 'dew_point': 22.8, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.02, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 20, + 'wind_gust_speed': 22.68, + 'wind_speed': 6.83, + }), + dict({ + 'apparent_temperature': 29.2, + 'cloud_coverage': 46.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T11:00:00Z', + 'dew_point': 22.8, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.43, + 'temperature': 26.2, + 'uv_index': 0, + 'wind_bearing': 129, + 'wind_gust_speed': 22.04, + 'wind_speed': 6.1, + }), + dict({ + 'apparent_temperature': 28.4, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T12:00:00Z', + 'dew_point': 22.7, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.71, + 'temperature': 25.6, + 'uv_index': 
0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.64, + 'wind_speed': 5.6, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T13:00:00Z', + 'dew_point': 23.2, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.52, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 16.35, + 'wind_speed': 5.58, + }), + dict({ + 'apparent_temperature': 27.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T14:00:00Z', + 'dew_point': 22.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.37, + 'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 168, + 'wind_gust_speed': 17.11, + 'wind_speed': 5.79, + }), + dict({ + 'apparent_temperature': 26.9, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.21, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 17.32, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.4, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 16.6, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T17:00:00Z', + 'dew_point': 22.5, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.95, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 219, + 'wind_gust_speed': 15.52, + 'wind_speed': 4.62, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 
66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T18:00:00Z', + 'dew_point': 22.3, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.88, + 'temperature': 23.3, + 'uv_index': 0, + 'wind_bearing': 216, + 'wind_gust_speed': 14.64, + 'wind_speed': 4.32, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T19:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.91, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 198, + 'wind_gust_speed': 14.06, + 'wind_speed': 4.73, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T20:00:00Z', + 'dew_point': 22.4, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.99, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 189, + 'wind_gust_speed': 13.7, + 'wind_speed': 5.49, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-15T21:00:00Z', + 'dew_point': 22.5, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.07, + 'temperature': 24.4, + 'uv_index': 0, + 'wind_bearing': 183, + 'wind_gust_speed': 13.77, + 'wind_speed': 5.95, + }), + dict({ + 'apparent_temperature': 28.3, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 84, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.12, + 'temperature': 25.5, + 'uv_index': 1, + 'wind_bearing': 179, + 'wind_gust_speed': 14.38, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 29.9, + 'cloud_coverage': 52.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-15T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 79, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1015.13, + 'temperature': 26.9, + 'uv_index': 2, + 'wind_bearing': 170, + 'wind_gust_speed': 15.2, + 'wind_speed': 5.27, + }), + dict({ + 'apparent_temperature': 31.2, + 'cloud_coverage': 44.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T00:00:00Z', + 'dew_point': 22.9, + 'humidity': 74, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1015.04, + 'temperature': 28.0, + 'uv_index': 4, + 'wind_bearing': 155, + 'wind_gust_speed': 15.85, + 'wind_speed': 4.76, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 24.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T01:00:00Z', + 'dew_point': 22.6, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.52, + 'temperature': 29.2, + 'uv_index': 6, + 'wind_bearing': 110, + 'wind_gust_speed': 16.27, + 'wind_speed': 6.81, + }), + dict({ + 'apparent_temperature': 33.5, + 'cloud_coverage': 16.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T02:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1014.01, + 'temperature': 30.2, + 'uv_index': 8, + 'wind_bearing': 30, + 'wind_gust_speed': 16.55, + 'wind_speed': 6.86, + }), + dict({ + 'apparent_temperature': 34.2, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T03:00:00Z', + 'dew_point': 22.0, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.45, + 'temperature': 31.1, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.52, + 'wind_speed': 6.8, + }), + dict({ + 'apparent_temperature': 34.7, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T04:00:00Z', + 'dew_point': 21.9, + 'humidity': 57, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.89, + 'temperature': 31.5, + 'uv_index': 8, + 'wind_bearing': 17, + 'wind_gust_speed': 16.08, 
+ 'wind_speed': 6.62, + }), + dict({ + 'apparent_temperature': 34.9, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T05:00:00Z', + 'dew_point': 21.9, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.39, + 'temperature': 31.8, + 'uv_index': 6, + 'wind_bearing': 20, + 'wind_gust_speed': 15.48, + 'wind_speed': 6.45, + }), + dict({ + 'apparent_temperature': 34.5, + 'cloud_coverage': 10.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T06:00:00Z', + 'dew_point': 21.7, + 'humidity': 56, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.11, + 'temperature': 31.4, + 'uv_index': 4, + 'wind_bearing': 26, + 'wind_gust_speed': 15.08, + 'wind_speed': 6.43, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 7.000000000000001, + 'condition': 'sunny', + 'datetime': '2023-09-16T07:00:00Z', + 'dew_point': 21.7, + 'humidity': 59, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.15, + 'temperature': 30.7, + 'uv_index': 2, + 'wind_bearing': 39, + 'wind_gust_speed': 14.88, + 'wind_speed': 6.61, + }), + dict({ + 'apparent_temperature': 32.5, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 63, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.41, + 'temperature': 29.6, + 'uv_index': 0, + 'wind_bearing': 72, + 'wind_gust_speed': 14.82, + 'wind_speed': 6.95, + }), + dict({ + 'apparent_temperature': 31.4, + 'cloud_coverage': 2.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T09:00:00Z', + 'dew_point': 22.1, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.75, + 'temperature': 28.5, + 'uv_index': 0, + 'wind_bearing': 116, + 'wind_gust_speed': 15.13, + 'wind_speed': 7.45, + }), + dict({ + 'apparent_temperature': 30.5, + 'cloud_coverage': 13.0, + 'condition': 'sunny', + 'datetime': 
'2023-09-16T10:00:00Z', + 'dew_point': 22.3, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.13, + 'temperature': 27.6, + 'uv_index': 0, + 'wind_bearing': 140, + 'wind_gust_speed': 16.09, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 31.0, + 'condition': 'sunny', + 'datetime': '2023-09-16T11:00:00Z', + 'dew_point': 22.6, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.47, + 'temperature': 26.9, + 'uv_index': 0, + 'wind_bearing': 149, + 'wind_gust_speed': 17.37, + 'wind_speed': 8.87, + }), + dict({ + 'apparent_temperature': 29.3, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T12:00:00Z', + 'dew_point': 22.9, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.6, + 'temperature': 26.3, + 'uv_index': 0, + 'wind_bearing': 155, + 'wind_gust_speed': 18.29, + 'wind_speed': 9.21, + }), + dict({ + 'apparent_temperature': 28.7, + 'cloud_coverage': 51.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T13:00:00Z', + 'dew_point': 23.0, + 'humidity': 85, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.41, + 'temperature': 25.7, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 18.49, + 'wind_speed': 8.96, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 55.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T14:00:00Z', + 'dew_point': 22.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1013.01, + 'temperature': 25.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.47, + 'wind_speed': 8.45, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T15:00:00Z', + 'dew_point': 22.7, + 'humidity': 90, + 'precipitation': 0.0, + 
'precipitation_probability': 0.0, + 'pressure': 1012.55, + 'temperature': 24.5, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 18.79, + 'wind_speed': 8.1, + }), + dict({ + 'apparent_temperature': 26.7, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T16:00:00Z', + 'dew_point': 22.6, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.1, + 'temperature': 24.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 19.81, + 'wind_speed': 8.15, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T17:00:00Z', + 'dew_point': 22.6, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.68, + 'temperature': 23.7, + 'uv_index': 0, + 'wind_bearing': 161, + 'wind_gust_speed': 20.96, + 'wind_speed': 8.3, + }), + dict({ + 'apparent_temperature': 26.0, + 'cloud_coverage': 72.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T18:00:00Z', + 'dew_point': 22.4, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 21.41, + 'wind_speed': 8.24, + }), + dict({ + 'apparent_temperature': 26.3, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T19:00:00Z', + 'dew_point': 22.5, + 'humidity': 93, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.29, + 'temperature': 23.8, + 'uv_index': 0, + 'wind_bearing': 159, + 'wind_gust_speed': 20.42, + 'wind_speed': 7.62, + }), + dict({ + 'apparent_temperature': 26.8, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-16T20:00:00Z', + 'dew_point': 22.6, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.31, + 'temperature': 24.2, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 18.61, 
+ 'wind_speed': 6.66, + }), + dict({ + 'apparent_temperature': 27.7, + 'cloud_coverage': 57.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T21:00:00Z', + 'dew_point': 22.6, + 'humidity': 87, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 24.9, + 'uv_index': 0, + 'wind_bearing': 158, + 'wind_gust_speed': 17.14, + 'wind_speed': 5.86, + }), + dict({ + 'apparent_temperature': 28.9, + 'cloud_coverage': 48.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T22:00:00Z', + 'dew_point': 22.6, + 'humidity': 82, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.46, + 'temperature': 26.0, + 'uv_index': 1, + 'wind_bearing': 161, + 'wind_gust_speed': 16.78, + 'wind_speed': 5.5, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 39.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-16T23:00:00Z', + 'dew_point': 22.9, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.51, + 'temperature': 27.5, + 'uv_index': 2, + 'wind_bearing': 165, + 'wind_gust_speed': 17.21, + 'wind_speed': 5.56, + }), + dict({ + 'apparent_temperature': 31.7, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T00:00:00Z', + 'dew_point': 22.8, + 'humidity': 71, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.39, + 'temperature': 28.5, + 'uv_index': 4, + 'wind_bearing': 174, + 'wind_gust_speed': 17.96, + 'wind_speed': 6.04, + }), + dict({ + 'apparent_temperature': 32.6, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T01:00:00Z', + 'dew_point': 22.7, + 'humidity': 68, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.98, + 'temperature': 29.4, + 'uv_index': 6, + 'wind_bearing': 192, + 'wind_gust_speed': 19.15, + 'wind_speed': 7.23, + }), + dict({ + 'apparent_temperature': 33.6, + 'cloud_coverage': 28.999999999999996, + 
'condition': 'sunny', + 'datetime': '2023-09-17T02:00:00Z', + 'dew_point': 22.8, + 'humidity': 65, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1010.38, + 'temperature': 30.1, + 'uv_index': 7, + 'wind_bearing': 225, + 'wind_gust_speed': 20.89, + 'wind_speed': 8.9, + }), + dict({ + 'apparent_temperature': 34.1, + 'cloud_coverage': 30.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T03:00:00Z', + 'dew_point': 22.8, + 'humidity': 63, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1009.75, + 'temperature': 30.7, + 'uv_index': 8, + 'wind_bearing': 264, + 'wind_gust_speed': 22.67, + 'wind_speed': 10.27, + }), + dict({ + 'apparent_temperature': 33.9, + 'cloud_coverage': 37.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T04:00:00Z', + 'dew_point': 22.5, + 'humidity': 62, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1009.18, + 'temperature': 30.5, + 'uv_index': 7, + 'wind_bearing': 293, + 'wind_gust_speed': 23.93, + 'wind_speed': 10.82, + }), + dict({ + 'apparent_temperature': 33.4, + 'cloud_coverage': 45.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T05:00:00Z', + 'dew_point': 22.4, + 'humidity': 63, + 'precipitation': 0.6, + 'precipitation_probability': 12.0, + 'pressure': 1008.71, + 'temperature': 30.1, + 'uv_index': 5, + 'wind_bearing': 308, + 'wind_gust_speed': 24.39, + 'wind_speed': 10.72, + }), + dict({ + 'apparent_temperature': 32.7, + 'cloud_coverage': 50.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T06:00:00Z', + 'dew_point': 22.2, + 'humidity': 64, + 'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.46, + 'temperature': 29.6, + 'uv_index': 3, + 'wind_bearing': 312, + 'wind_gust_speed': 23.9, + 'wind_speed': 10.28, + }), + dict({ + 'apparent_temperature': 31.8, + 'cloud_coverage': 47.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T07:00:00Z', + 'dew_point': 22.1, + 'humidity': 67, + 
'precipitation': 0.7, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1008.53, + 'temperature': 28.9, + 'uv_index': 1, + 'wind_bearing': 312, + 'wind_gust_speed': 22.3, + 'wind_speed': 9.59, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 41.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T08:00:00Z', + 'dew_point': 21.9, + 'humidity': 70, + 'precipitation': 0.6, + 'precipitation_probability': 15.0, + 'pressure': 1008.82, + 'temperature': 27.9, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 19.73, + 'wind_speed': 8.58, + }), + dict({ + 'apparent_temperature': 29.6, + 'cloud_coverage': 35.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T09:00:00Z', + 'dew_point': 22.0, + 'humidity': 74, + 'precipitation': 0.5, + 'precipitation_probability': 15.0, + 'pressure': 1009.21, + 'temperature': 27.0, + 'uv_index': 0, + 'wind_bearing': 291, + 'wind_gust_speed': 16.49, + 'wind_speed': 7.34, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 33.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T10:00:00Z', + 'dew_point': 21.9, + 'humidity': 78, + 'precipitation': 0.4, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1009.65, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 257, + 'wind_gust_speed': 12.71, + 'wind_speed': 5.91, + }), + dict({ + 'apparent_temperature': 27.8, + 'cloud_coverage': 34.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T11:00:00Z', + 'dew_point': 21.9, + 'humidity': 82, + 'precipitation': 0.3, + 'precipitation_probability': 14.000000000000002, + 'pressure': 1010.04, + 'temperature': 25.3, + 'uv_index': 0, + 'wind_bearing': 212, + 'wind_gust_speed': 9.16, + 'wind_speed': 4.54, + }), + dict({ + 'apparent_temperature': 27.1, + 'cloud_coverage': 36.0, + 'condition': 'sunny', + 'datetime': '2023-09-17T12:00:00Z', + 'dew_point': 21.9, + 'humidity': 85, + 'precipitation': 0.3, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1010.24, + 
'temperature': 24.6, + 'uv_index': 0, + 'wind_bearing': 192, + 'wind_gust_speed': 7.09, + 'wind_speed': 3.62, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 40.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T13:00:00Z', + 'dew_point': 22.0, + 'humidity': 88, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1010.15, + 'temperature': 24.1, + 'uv_index': 0, + 'wind_bearing': 185, + 'wind_gust_speed': 7.2, + 'wind_speed': 3.27, + }), + dict({ + 'apparent_temperature': 25.9, + 'cloud_coverage': 44.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T14:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.3, + 'precipitation_probability': 30.0, + 'pressure': 1009.87, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.22, + }), + dict({ + 'apparent_temperature': 25.5, + 'cloud_coverage': 49.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T15:00:00Z', + 'dew_point': 21.8, + 'humidity': 92, + 'precipitation': 0.2, + 'precipitation_probability': 31.0, + 'pressure': 1009.56, + 'temperature': 23.2, + 'uv_index': 0, + 'wind_bearing': 180, + 'wind_gust_speed': 9.21, + 'wind_speed': 3.3, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 53.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T16:00:00Z', + 'dew_point': 21.8, + 'humidity': 94, + 'precipitation': 0.2, + 'precipitation_probability': 33.0, + 'pressure': 1009.29, + 'temperature': 22.9, + 'uv_index': 0, + 'wind_bearing': 182, + 'wind_gust_speed': 9.0, + 'wind_speed': 3.46, + }), + dict({ + 'apparent_temperature': 24.8, + 'cloud_coverage': 56.00000000000001, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T17:00:00Z', + 'dew_point': 21.7, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 35.0, + 'pressure': 1009.09, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 186, + 'wind_gust_speed': 8.37, + 'wind_speed': 3.72, + }), + dict({ 
+ 'apparent_temperature': 24.6, + 'cloud_coverage': 59.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T18:00:00Z', + 'dew_point': 21.6, + 'humidity': 95, + 'precipitation': 0.0, + 'precipitation_probability': 37.0, + 'pressure': 1009.01, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 201, + 'wind_gust_speed': 7.99, + 'wind_speed': 4.07, + }), + dict({ + 'apparent_temperature': 24.9, + 'cloud_coverage': 62.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-17T19:00:00Z', + 'dew_point': 21.7, + 'humidity': 94, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.07, + 'temperature': 22.7, + 'uv_index': 0, + 'wind_bearing': 258, + 'wind_gust_speed': 8.18, + 'wind_speed': 4.55, + }), + dict({ + 'apparent_temperature': 25.2, + 'cloud_coverage': 64.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T20:00:00Z', + 'dew_point': 21.7, + 'humidity': 92, + 'precipitation': 0.0, + 'precipitation_probability': 39.0, + 'pressure': 1009.23, + 'temperature': 23.0, + 'uv_index': 0, + 'wind_bearing': 305, + 'wind_gust_speed': 8.77, + 'wind_speed': 5.17, + }), + dict({ + 'apparent_temperature': 25.8, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T21:00:00Z', + 'dew_point': 21.8, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 38.0, + 'pressure': 1009.47, + 'temperature': 23.5, + 'uv_index': 0, + 'wind_bearing': 318, + 'wind_gust_speed': 9.69, + 'wind_speed': 5.77, + }), + dict({ + 'apparent_temperature': 26.5, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-17T22:00:00Z', + 'dew_point': 21.8, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 30.0, + 'pressure': 1009.77, + 'temperature': 24.2, + 'uv_index': 1, + 'wind_bearing': 324, + 'wind_gust_speed': 10.88, + 'wind_speed': 6.26, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 80.0, + 'condition': 'rainy', + 'datetime': '2023-09-17T23:00:00Z', + 
'dew_point': 21.9, + 'humidity': 83, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.09, + 'temperature': 25.1, + 'uv_index': 2, + 'wind_bearing': 329, + 'wind_gust_speed': 12.21, + 'wind_speed': 6.68, + }), + dict({ + 'apparent_temperature': 28.2, + 'cloud_coverage': 87.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T00:00:00Z', + 'dew_point': 21.9, + 'humidity': 80, + 'precipitation': 0.2, + 'precipitation_probability': 15.0, + 'pressure': 1010.33, + 'temperature': 25.7, + 'uv_index': 3, + 'wind_bearing': 332, + 'wind_gust_speed': 13.52, + 'wind_speed': 7.12, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T01:00:00Z', + 'dew_point': 21.7, + 'humidity': 72, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1007.43, + 'temperature': 27.2, + 'uv_index': 5, + 'wind_bearing': 330, + 'wind_gust_speed': 11.36, + 'wind_speed': 11.36, + }), + dict({ + 'apparent_temperature': 30.1, + 'cloud_coverage': 70.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T02:00:00Z', + 'dew_point': 21.6, + 'humidity': 70, + 'precipitation': 0.3, + 'precipitation_probability': 9.0, + 'pressure': 1007.05, + 'temperature': 27.5, + 'uv_index': 6, + 'wind_bearing': 332, + 'wind_gust_speed': 12.06, + 'wind_speed': 12.06, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 71.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T03:00:00Z', + 'dew_point': 21.6, + 'humidity': 69, + 'precipitation': 0.5, + 'precipitation_probability': 10.0, + 'pressure': 1006.67, + 'temperature': 27.8, + 'uv_index': 6, + 'wind_bearing': 333, + 'wind_gust_speed': 12.81, + 'wind_speed': 12.81, + }), + dict({ + 'apparent_temperature': 30.6, + 'cloud_coverage': 67.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T04:00:00Z', + 'dew_point': 21.5, + 'humidity': 68, + 'precipitation': 0.4, + 'precipitation_probability': 10.0, + 'pressure': 1006.28, + 'temperature': 
28.0, + 'uv_index': 5, + 'wind_bearing': 335, + 'wind_gust_speed': 13.68, + 'wind_speed': 13.68, + }), + dict({ + 'apparent_temperature': 30.7, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T05:00:00Z', + 'dew_point': 21.4, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1005.89, + 'temperature': 28.1, + 'uv_index': 4, + 'wind_bearing': 336, + 'wind_gust_speed': 14.61, + 'wind_speed': 14.61, + }), + dict({ + 'apparent_temperature': 30.3, + 'cloud_coverage': 56.99999999999999, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T06:00:00Z', + 'dew_point': 21.2, + 'humidity': 67, + 'precipitation': 0.0, + 'precipitation_probability': 27.0, + 'pressure': 1005.67, + 'temperature': 27.9, + 'uv_index': 3, + 'wind_bearing': 338, + 'wind_gust_speed': 15.25, + 'wind_speed': 15.25, + }), + dict({ + 'apparent_temperature': 29.8, + 'cloud_coverage': 60.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T07:00:00Z', + 'dew_point': 21.3, + 'humidity': 69, + 'precipitation': 0.0, + 'precipitation_probability': 28.000000000000004, + 'pressure': 1005.74, + 'temperature': 27.4, + 'uv_index': 1, + 'wind_bearing': 339, + 'wind_gust_speed': 15.45, + 'wind_speed': 15.45, + }), + dict({ + 'apparent_temperature': 29.1, + 'cloud_coverage': 65.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T08:00:00Z', + 'dew_point': 21.4, + 'humidity': 73, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1005.98, + 'temperature': 26.7, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.38, + 'wind_speed': 15.38, + }), + dict({ + 'apparent_temperature': 28.6, + 'cloud_coverage': 68.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T09:00:00Z', + 'dew_point': 21.6, + 'humidity': 76, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.22, + 'temperature': 26.1, + 'uv_index': 0, + 'wind_bearing': 341, + 'wind_gust_speed': 15.27, + 'wind_speed': 
15.27, + }), + dict({ + 'apparent_temperature': 27.9, + 'cloud_coverage': 66.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T10:00:00Z', + 'dew_point': 21.6, + 'humidity': 79, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1006.44, + 'temperature': 25.6, + 'uv_index': 0, + 'wind_bearing': 339, + 'wind_gust_speed': 15.09, + 'wind_speed': 15.09, + }), + dict({ + 'apparent_temperature': 27.6, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T11:00:00Z', + 'dew_point': 21.7, + 'humidity': 81, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.66, + 'temperature': 25.2, + 'uv_index': 0, + 'wind_bearing': 336, + 'wind_gust_speed': 14.88, + 'wind_speed': 14.88, + }), + dict({ + 'apparent_temperature': 27.2, + 'cloud_coverage': 61.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T12:00:00Z', + 'dew_point': 21.8, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 26.0, + 'pressure': 1006.79, + 'temperature': 24.8, + 'uv_index': 0, + 'wind_bearing': 333, + 'wind_gust_speed': 14.91, + 'wind_speed': 14.91, + }), + dict({ + 'apparent_temperature': 25.7, + 'cloud_coverage': 38.0, + 'condition': 'partlycloudy', + 'datetime': '2023-09-18T13:00:00Z', + 'dew_point': 21.2, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1012.36, + 'temperature': 23.6, + 'uv_index': 0, + 'wind_bearing': 83, + 'wind_gust_speed': 4.58, + 'wind_speed': 3.16, + }), + dict({ + 'apparent_temperature': 25.1, + 'cloud_coverage': 74.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T14:00:00Z', + 'dew_point': 21.2, + 'humidity': 89, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.96, + 'temperature': 23.1, + 'uv_index': 0, + 'wind_bearing': 144, + 'wind_gust_speed': 4.74, + 'wind_speed': 4.52, + }), + dict({ + 'apparent_temperature': 24.5, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': 
'2023-09-18T15:00:00Z', + 'dew_point': 20.9, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.6, + 'temperature': 22.6, + 'uv_index': 0, + 'wind_bearing': 152, + 'wind_gust_speed': 5.63, + 'wind_speed': 5.63, + }), + dict({ + 'apparent_temperature': 24.0, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T16:00:00Z', + 'dew_point': 20.7, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.37, + 'temperature': 22.3, + 'uv_index': 0, + 'wind_bearing': 156, + 'wind_gust_speed': 6.02, + 'wind_speed': 6.02, + }), + dict({ + 'apparent_temperature': 23.7, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T17:00:00Z', + 'dew_point': 20.4, + 'humidity': 91, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.2, + 'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 6.15, + 'wind_speed': 6.15, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T18:00:00Z', + 'dew_point': 20.2, + 'humidity': 90, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.08, + 'temperature': 21.9, + 'uv_index': 0, + 'wind_bearing': 167, + 'wind_gust_speed': 6.48, + 'wind_speed': 6.48, + }), + dict({ + 'apparent_temperature': 23.2, + 'cloud_coverage': 100.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T19:00:00Z', + 'dew_point': 19.8, + 'humidity': 88, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.04, + 'temperature': 21.8, + 'uv_index': 0, + 'wind_bearing': 165, + 'wind_gust_speed': 7.51, + 'wind_speed': 7.51, + }), + dict({ + 'apparent_temperature': 23.4, + 'cloud_coverage': 99.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T20:00:00Z', + 'dew_point': 19.6, + 'humidity': 86, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.05, + 
'temperature': 22.0, + 'uv_index': 0, + 'wind_bearing': 162, + 'wind_gust_speed': 8.73, + 'wind_speed': 8.73, + }), + dict({ + 'apparent_temperature': 23.9, + 'cloud_coverage': 98.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T21:00:00Z', + 'dew_point': 19.5, + 'humidity': 83, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.06, + 'temperature': 22.5, + 'uv_index': 0, + 'wind_bearing': 164, + 'wind_gust_speed': 9.21, + 'wind_speed': 9.11, + }), + dict({ + 'apparent_temperature': 25.3, + 'cloud_coverage': 96.0, + 'condition': 'cloudy', + 'datetime': '2023-09-18T22:00:00Z', + 'dew_point': 19.7, + 'humidity': 78, + 'precipitation': 0.0, + 'precipitation_probability': 0.0, + 'pressure': 1011.09, + 'temperature': 23.8, + 'uv_index': 1, + 'wind_bearing': 171, + 'wind_gust_speed': 9.03, + 'wind_speed': 7.91, + }), + ]), + }), + }) +# --- diff --git a/tests/components/weatherkit/test_weather.py b/tests/components/weatherkit/test_weather.py index fabd3aab572..3b3a9a50d7f 100644 --- a/tests/components/weatherkit/test_weather.py +++ b/tests/components/weatherkit/test_weather.py @@ -1,5 +1,6 @@ """Weather entity tests for the WeatherKit integration.""" +import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.weather import ( @@ -15,7 +16,8 @@ from homeassistant.components.weather import ( ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + LEGACY_SERVICE_GET_FORECAST, + SERVICE_GET_FORECASTS, ) from homeassistant.components.weather.const import WeatherEntityFeature from homeassistant.components.weatherkit.const import ATTRIBUTION @@ -77,15 +79,22 @@ async def test_hourly_forecast_missing(hass: HomeAssistant) -> None: ) == 0 +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) async def test_hourly_forecast( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, snapshot: 
SnapshotAssertion, service: str ) -> None: """Test states of the hourly forecast.""" await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.home", "type": "hourly", @@ -93,17 +102,25 @@ async def test_hourly_forecast( blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot -async def test_daily_forecast(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_GET_FORECASTS, + LEGACY_SERVICE_GET_FORECAST, + ], +) +async def test_daily_forecast( + hass: HomeAssistant, snapshot: SnapshotAssertion, service: str +) -> None: """Test states of the daily forecast.""" await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, - SERVICE_GET_FORECAST, + service, { "entity_id": "weather.home", "type": "daily", @@ -111,5 +128,4 @@ async def test_daily_forecast(hass: HomeAssistant, snapshot: SnapshotAssertion) blocking=True, return_response=True, ) - assert response["forecast"] != [] assert response == snapshot diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index a9551310c2a..127b45484be 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -2317,6 +2317,65 @@ async def test_execute_script( assert call.context.as_dict() == msg_var["result"]["context"] +@pytest.mark.parametrize( + ("raise_exception", "err_code"), + [ + ( + HomeAssistantError( + "Some error", + translation_domain="test", + translation_key="test_error", + translation_placeholders={"option": "bla"}, + ), + "home_assistant_error", + ), + ( + ServiceValidationError( + "Some error", + translation_domain="test", + translation_key="test_error", + translation_placeholders={"option": "bla"}, + ), + "service_validation_error", + ), + ], +) +async def 
test_execute_script_err_localization( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + raise_exception: HomeAssistantError, + err_code: str, +) -> None: + """Test testing a condition.""" + async_mock_service( + hass, "domain_test", "test_service", raise_exception=raise_exception + ) + + await websocket_client.send_json( + { + "id": 5, + "type": "execute_script", + "sequence": [ + { + "service": "domain_test.test_service", + "data": {"hello": "world"}, + }, + {"stop": "done", "response_variable": "service_result"}, + ], + } + ) + + msg = await websocket_client.receive_json() + assert msg["id"] == 5 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] is False + assert msg["error"]["code"] == err_code + assert msg["error"]["message"] == "Some error" + assert msg["error"]["translation_key"] == "test_error" + assert msg["error"]["translation_domain"] == "test" + assert msg["error"]["translation_placeholders"] == {"option": "bla"} + + async def test_execute_script_complex_response( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: diff --git a/tests/components/websocket_api/test_connection.py b/tests/components/websocket_api/test_connection.py index da435d64d58..80936d30752 100644 --- a/tests/components/websocket_api/test_connection.py +++ b/tests/components/websocket_api/test_connection.py @@ -39,9 +39,15 @@ from tests.common import MockUser ), ( exceptions.HomeAssistantError("Failed to do X"), - websocket_api.ERR_UNKNOWN_ERROR, + websocket_api.ERR_HOME_ASSISTANT_ERROR, "Failed to do X", - "Error handling message: Failed to do X (unknown_error) Mock User from 127.0.0.42 (Browser)", + "Error handling message: Failed to do X (home_assistant_error) Mock User from 127.0.0.42 (Browser)", + ), + ( + exceptions.ServiceValidationError("Failed to do X"), + websocket_api.ERR_HOME_ASSISTANT_ERROR, + "Failed to do X", + "Error handling message: Failed to do X (home_assistant_error) Mock User from 127.0.0.42 (Browser)", ), ( 
ValueError("Really bad"), diff --git a/tests/components/withings/snapshots/test_sensor.ambr b/tests/components/withings/snapshots/test_sensor.ambr index 59d9b470247..4ca4093e3b8 100644 --- a/tests/components/withings/snapshots/test_sensor.ambr +++ b/tests/components/withings/snapshots/test_sensor.ambr @@ -178,6 +178,38 @@ 'state': '1020.121', }) # --- +# name: test_all_entities[sensor.henk_elevation_change_last_workout] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'henk Elevation change last workout', + 'icon': 'mdi:stairs-up', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.henk_elevation_change_last_workout', + 'last_changed': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_all_entities[sensor.henk_elevation_change_today] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'henk Elevation change today', + 'icon': 'mdi:stairs-up', + 'last_reset': '2023-10-20T00:00:00-07:00', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.henk_elevation_change_today', + 'last_changed': , + 'last_updated': , + 'state': '0', + }) +# --- # name: test_all_entities[sensor.henk_extracellular_water] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -237,36 +269,6 @@ 'state': '0.07', }) # --- -# name: test_all_entities[sensor.henk_floors_climbed_last_workout] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'henk Floors climbed last workout', - 'icon': 'mdi:stairs-up', - 'unit_of_measurement': 'floors', - }), - 'context': , - 'entity_id': 'sensor.henk_floors_climbed_last_workout', - 'last_changed': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_all_entities[sensor.henk_floors_climbed_today] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'henk Floors climbed today', - 'icon': 'mdi:stairs-up', - 'last_reset': '2023-10-20T00:00:00-07:00', - 'state_class': , - 
'unit_of_measurement': 'floors', - }), - 'context': , - 'entity_id': 'sensor.henk_floors_climbed_today', - 'last_changed': , - 'last_updated': , - 'state': '0', - }) -# --- # name: test_all_entities[sensor.henk_heart_pulse] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/withings/test_diagnostics.py b/tests/components/withings/test_diagnostics.py index bb5c93e1f09..928eccdde0f 100644 --- a/tests/components/withings/test_diagnostics.py +++ b/tests/components/withings/test_diagnostics.py @@ -67,9 +67,9 @@ async def test_diagnostics_cloudhook_instance( ), patch( "homeassistant.helpers.config_entry_oauth2_flow.async_get_config_entry_implementation", ), patch( - "homeassistant.components.cloud.async_delete_cloudhook" + "homeassistant.components.cloud.async_delete_cloudhook", ), patch( - "homeassistant.components.withings.webhook_generate_url" + "homeassistant.components.withings.webhook_generate_url", ): await setup_integration(hass, webhook_config_entry) await prepare_webhook_setup(hass, freezer) diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index 3f20791ac4d..390fbc3bbc3 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -352,7 +352,7 @@ async def test_removing_entry_with_cloud_unavailable( "homeassistant.components.cloud.async_delete_cloudhook", side_effect=CloudNotAvailable(), ), patch( - "homeassistant.components.withings.webhook_generate_url" + "homeassistant.components.withings.webhook_generate_url", ): await setup_integration(hass, cloudhook_config_entry) assert hass.components.cloud.async_active_subscription() is True @@ -469,9 +469,9 @@ async def test_cloud_disconnect( ), patch( "homeassistant.components.withings.async_get_config_entry_implementation", ), patch( - "homeassistant.components.cloud.async_delete_cloudhook" + "homeassistant.components.cloud.async_delete_cloudhook", ), patch( - 
"homeassistant.components.withings.webhook_generate_url" + "homeassistant.components.withings.webhook_generate_url", ): await setup_integration(hass, webhook_config_entry) await prepare_webhook_setup(hass, freezer) diff --git a/tests/components/workday/__init__.py b/tests/components/workday/__init__.py index f2744758efb..fb436a57e5c 100644 --- a/tests/components/workday/__init__.py +++ b/tests/components/workday/__init__.py @@ -277,3 +277,25 @@ TEST_CONFIG_ADD_REMOVE_DATE_RANGE = { "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], "language": "de", } +TEST_LANGUAGE_CHANGE = { + "name": DEFAULT_NAME, + "country": "DE", + "province": "BW", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": ["2022-12-01", "2022-12-05,2022-12-15"], + "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], + "language": "en", +} +TEST_LANGUAGE_NO_CHANGE = { + "name": DEFAULT_NAME, + "country": "DE", + "province": "BW", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": ["2022-12-01", "2022-12-05,2022-12-15"], + "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], + "language": "de", +} diff --git a/tests/components/workday/test_binary_sensor.py b/tests/components/workday/test_binary_sensor.py index 6ce5b08ef27..7457d2e0ada 100644 --- a/tests/components/workday/test_binary_sensor.py +++ b/tests/components/workday/test_binary_sensor.py @@ -35,6 +35,8 @@ from . 
import ( TEST_CONFIG_WITH_PROVINCE, TEST_CONFIG_WITH_STATE, TEST_CONFIG_YESTERDAY, + TEST_LANGUAGE_CHANGE, + TEST_LANGUAGE_NO_CHANGE, init_integration, ) @@ -313,3 +315,21 @@ async def test_check_date_service( return_response=True, ) assert response == {"binary_sensor.workday_sensor": {"workday": True}} + + +async def test_language_difference_english_language( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling difference in English language naming.""" + await init_integration(hass, TEST_LANGUAGE_CHANGE) + assert "Changing language from en to en_US" in caplog.text + + +async def test_language_difference_no_change_other_language( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test skipping if no difference in language naming.""" + await init_integration(hass, TEST_LANGUAGE_NO_CHANGE) + assert "Changing language from en to en_US" not in caplog.text diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index 3ecd518ce98..57a7046546e 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -551,7 +551,7 @@ pytestmark = pytest.mark.usefixtures() ("language", "holiday"), [ ("de", "Weihnachtstag"), - ("en_US", "Christmas"), + ("en", "Christmas"), ], ) async def test_language( diff --git a/tests/components/wyoming/test_tts.py b/tests/components/wyoming/test_tts.py index 68b7b2b62bc..2f2a25558e4 100644 --- a/tests/components/wyoming/test_tts.py +++ b/tests/components/wyoming/test_tts.py @@ -180,7 +180,7 @@ async def test_get_tts_audio_audio_oserror( ), patch.object( mock_client, "read_event", side_effect=OSError("Boom!") ), pytest.raises( - HomeAssistantError + HomeAssistantError, ): await tts.async_get_media_source_audio( hass, diff --git a/tests/components/yamaha_musiccast/test_config_flow.py b/tests/components/yamaha_musiccast/test_config_flow.py index ccccd98b3b6..4ce95e418d0 100644 --- 
a/tests/components/yamaha_musiccast/test_config_flow.py +++ b/tests/components/yamaha_musiccast/test_config_flow.py @@ -22,9 +22,9 @@ async def silent_ssdp_scanner(hass): ), patch("homeassistant.components.ssdp.Scanner._async_stop_ssdp_listeners"), patch( "homeassistant.components.ssdp.Scanner.async_scan" ), patch( - "homeassistant.components.ssdp.Server._async_start_upnp_servers" + "homeassistant.components.ssdp.Server._async_start_upnp_servers", ), patch( - "homeassistant.components.ssdp.Server._async_stop_upnp_servers" + "homeassistant.components.ssdp.Server._async_stop_upnp_servers", ): yield diff --git a/tests/components/yeelight/test_config_flow.py b/tests/components/yeelight/test_config_flow.py index 0bd5b5f59d0..e1d33ee5f75 100644 --- a/tests/components/yeelight/test_config_flow.py +++ b/tests/components/yeelight/test_config_flow.py @@ -440,9 +440,11 @@ async def test_manual_no_capabilities(hass: HomeAssistant) -> None: ), _patch_discovery_timeout(), _patch_discovery_interval(), patch( f"{MODULE_CONFIG_FLOW}.AsyncBulb", return_value=mocked_bulb ), patch( - f"{MODULE}.async_setup", return_value=True + f"{MODULE}.async_setup", + return_value=True, ), patch( - f"{MODULE}.async_setup_entry", return_value=True + f"{MODULE}.async_setup_entry", + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_HOST: IP_ADDRESS} diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index 9d9d74e72df..1b3a536007a 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -26,7 +26,9 @@ import zigpy.zdo.types as zdo_t import homeassistant.components.zha.core.const as zha_const import homeassistant.components.zha.core.device as zha_core_device from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.helpers import restore_state from homeassistant.setup import async_setup_component +import homeassistant.util.dt as dt_util from .common import 
patch_cluster as common_patch_cluster @@ -44,7 +46,7 @@ def disable_request_retry_delay(): with patch( "homeassistant.components.zha.core.cluster_handlers.RETRYABLE_REQUEST_DECORATOR", zigpy.util.retryable_request(tries=3, delay=0), - ): + ), patch("homeassistant.components.zha.STARTUP_FAILURE_DELAY_S", 0.01): yield @@ -81,8 +83,8 @@ class _FakeApp(ControllerApplication): async def permit_ncp(self, time_s: int = 60): pass - async def permit_with_key( - self, node: zigpy.types.EUI64, code: bytes, time_s: int = 60 + async def permit_with_link_key( + self, node: zigpy.types.EUI64, link_key: zigpy.types.KeyData, time_s: int = 60 ): pass @@ -498,3 +500,35 @@ def network_backup() -> zigpy.backups.NetworkBackup: }, } ) + + +@pytest.fixture +def core_rs(hass_storage): + """Core.restore_state fixture.""" + + def _storage(entity_id, state, attributes={}): + now = dt_util.utcnow().isoformat() + + hass_storage[restore_state.STORAGE_KEY] = { + "version": restore_state.STORAGE_VERSION, + "key": restore_state.STORAGE_KEY, + "data": [ + { + "state": { + "entity_id": entity_id, + "state": str(state), + "attributes": attributes, + "last_changed": now, + "last_updated": now, + "context": { + "id": "3c2243ff5f30447eb12e7348cfd5b8ff", + "user_id": None, + }, + }, + "last_seen": now, + } + ], + } + return + + return _storage diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index b41499dada7..5dd7a5653ec 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -9,8 +9,6 @@ import zigpy.zcl.clusters.security as security from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import restore_state -from homeassistant.util import dt as dt_util from .common import ( async_enable_traffic, @@ -152,38 +150,6 @@ async def test_binary_sensor( assert hass.states.get(entity_id).state == STATE_OFF 
-@pytest.fixture -def core_rs(hass_storage): - """Core.restore_state fixture.""" - - def _storage(entity_id, attributes, state): - now = dt_util.utcnow().isoformat() - - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "attributes": attributes, - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], - } - return - - return _storage - - @pytest.mark.parametrize( "restored_state", [ diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 9ec8048ea03..883df4aba94 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -10,7 +10,7 @@ import pytest import serial.tools.list_ports from zigpy.backups import BackupManager import zigpy.config -from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH +from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH, SCHEMA_DEVICE import zigpy.device from zigpy.exceptions import NetworkNotFormed import zigpy.types @@ -22,7 +22,7 @@ from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_ from homeassistant.components.zha import config_flow, radio_manager from homeassistant.components.zha.core.const import ( CONF_BAUDRATE, - CONF_FLOWCONTROL, + CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN, EZSP_OVERWRITE_EUI64, @@ -118,9 +118,7 @@ def mock_detect_radio_type( async def detect(self): self.radio_type = radio_type - self.device_settings = radio_type.controller.SCHEMA_DEVICE( - {CONF_DEVICE_PATH: self.device_path} - ) + self.device_settings = SCHEMA_DEVICE({CONF_DEVICE_PATH: self.device_path}) return ret @@ -181,7 +179,7 @@ async def test_zeroconf_discovery_znp(hass: HomeAssistant) -> None: assert result3["data"] == { CONF_DEVICE: { CONF_BAUDRATE: 115200, - 
CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, CONF_DEVICE_PATH: "socket://192.168.1.200:6638", }, CONF_RADIO_TYPE: "znp", @@ -238,6 +236,8 @@ async def test_zigate_via_zeroconf(setup_entry_mock, hass: HomeAssistant) -> Non assert result4["data"] == { CONF_DEVICE: { CONF_DEVICE_PATH: "socket://192.168.1.200:1234", + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "zigate", } @@ -287,7 +287,7 @@ async def test_efr32_via_zeroconf(hass: HomeAssistant) -> None: CONF_DEVICE: { CONF_DEVICE_PATH: "socket://192.168.1.200:1234", CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: "software", + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "ezsp", } @@ -304,7 +304,7 @@ async def test_discovery_via_zeroconf_ip_change(hass: HomeAssistant) -> None: CONF_DEVICE: { CONF_DEVICE_PATH: "socket://192.168.1.5:6638", CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, } }, ) @@ -328,7 +328,7 @@ async def test_discovery_via_zeroconf_ip_change(hass: HomeAssistant) -> None: assert entry.data[CONF_DEVICE] == { CONF_DEVICE_PATH: "socket://192.168.1.22:6638", CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, } @@ -483,6 +483,8 @@ async def test_zigate_discovery_via_usb(probe_mock, hass: HomeAssistant) -> None assert result4["data"] == { "device": { "path": "/dev/ttyZIGBEE", + "baudrate": 115200, + "flow_control": None, }, CONF_RADIO_TYPE: "zigate", } @@ -555,7 +557,7 @@ async def test_discovery_via_usb_path_changes(hass: HomeAssistant) -> None: CONF_DEVICE: { CONF_DEVICE_PATH: "/dev/ttyUSB1", CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, } }, ) @@ -579,7 +581,7 @@ async def test_discovery_via_usb_path_changes(hass: HomeAssistant) -> None: assert entry.data[CONF_DEVICE] == { CONF_DEVICE_PATH: "/dev/ttyZIGBEE", CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, } @@ -754,6 +756,8 @@ async def test_user_flow(hass: HomeAssistant) -> None: assert result2["data"] == { "device": 
{ "path": port.device, + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "deconz", } @@ -773,7 +777,11 @@ async def test_user_flow_not_detected(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, - data={zigpy.config.CONF_DEVICE_PATH: port_select}, + data={ + zigpy.config.CONF_DEVICE_PATH: port_select, + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, + }, ) assert result["type"] == FlowResultType.FORM @@ -951,31 +959,6 @@ async def test_user_port_config(probe_mock, hass: HomeAssistant) -> None: assert probe_mock.await_count == 1 -@pytest.mark.parametrize( - ("old_type", "new_type"), - [ - ("ezsp", "ezsp"), - ("ti_cc", "znp"), # only one that should change - ("znp", "znp"), - ("deconz", "deconz"), - ], -) -async def test_migration_ti_cc_to_znp( - old_type, new_type, hass: HomeAssistant, config_entry: MockConfigEntry -) -> None: - """Test zigpy-cc to zigpy-znp config migration.""" - config_entry.data = {**config_entry.data, CONF_RADIO_TYPE: old_type} - config_entry.version = 2 - config_entry.add_to_hass(hass) - - with patch("homeassistant.components.zha.async_setup_entry", return_value=True): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.version > 2 - assert config_entry.data[CONF_RADIO_TYPE] == new_type - - @pytest.mark.parametrize("onboarded", [True, False]) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_hardware(onboarded, hass: HomeAssistant) -> None: @@ -1022,7 +1005,7 @@ async def test_hardware(onboarded, hass: HomeAssistant) -> None: assert result3["data"] == { CONF_DEVICE: { CONF_BAUDRATE: 115200, - CONF_FLOWCONTROL: "hardware", + CONF_FLOW_CONTROL: "hardware", CONF_DEVICE_PATH: "/dev/ttyAMA1", }, CONF_RADIO_TYPE: "ezsp", @@ -1171,6 +1154,7 @@ async def test_formation_strategy_form_initial_network( 
@patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) +@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_onboarding_auto_formation_new_hardware( mock_app, hass: HomeAssistant ) -> None: @@ -1577,7 +1561,7 @@ async def test_options_flow_defaults( CONF_DEVICE: { CONF_DEVICE_PATH: "/dev/ttyUSB0", CONF_BAUDRATE: 12345, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "znp", }, @@ -1645,7 +1629,7 @@ async def test_options_flow_defaults( # Change everything CONF_DEVICE_PATH: "/dev/new_serial_port", CONF_BAUDRATE: 54321, - CONF_FLOWCONTROL: "software", + CONF_FLOW_CONTROL: "software", }, ) @@ -1668,7 +1652,7 @@ async def test_options_flow_defaults( CONF_DEVICE: { CONF_DEVICE_PATH: "/dev/new_serial_port", CONF_BAUDRATE: 54321, - CONF_FLOWCONTROL: "software", + CONF_FLOW_CONTROL: "software", }, CONF_RADIO_TYPE: "znp", } @@ -1697,7 +1681,7 @@ async def test_options_flow_defaults_socket(hass: HomeAssistant) -> None: CONF_DEVICE: { CONF_DEVICE_PATH: "socket://localhost:5678", CONF_BAUDRATE: 12345, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "znp", }, @@ -1766,7 +1750,7 @@ async def test_options_flow_restarts_running_zha_if_cancelled( CONF_DEVICE: { CONF_DEVICE_PATH: "socket://localhost:5678", CONF_BAUDRATE: 12345, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "znp", }, @@ -1821,7 +1805,7 @@ async def test_options_flow_migration_reset_old_adapter( CONF_DEVICE: { CONF_DEVICE_PATH: "/dev/serial/by-id/old_radio", CONF_BAUDRATE: 12345, - CONF_FLOWCONTROL: None, + CONF_FLOW_CONTROL: None, }, CONF_RADIO_TYPE: "znp", }, @@ -1954,3 +1938,28 @@ async def test_discovery_wrong_firmware_installed(hass: HomeAssistant) -> None: assert result["type"] == FlowResultType.ABORT assert result["reason"] == "wrong_firmware_installed" + + +@pytest.mark.parametrize( + ("old_type", "new_type"), + [ + ("ezsp", "ezsp"), + ("ti_cc", "znp"), # only one that 
should change + ("znp", "znp"), + ("deconz", "deconz"), + ], +) +async def test_migration_ti_cc_to_znp( + old_type: str, new_type: str, hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test zigpy-cc to zigpy-znp config migration.""" + config_entry.data = {**config_entry.data, CONF_RADIO_TYPE: old_type} + config_entry.version = 2 + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.zha.async_setup_entry", return_value=True): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.version > 2 + assert config_entry.data[CONF_RADIO_TYPE] == new_type diff --git a/tests/components/zha/test_fan.py b/tests/components/zha/test_fan.py index 737604482d8..7d45960d576 100644 --- a/tests/components/zha/test_fan.py +++ b/tests/components/zha/test_fan.py @@ -222,10 +222,11 @@ async def test_fan( # set invalid preset_mode from HA cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await async_set_preset_mode( hass, entity_id, preset_mode="invalid does not exist" ) + assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 # test adding new fan to the network and HA @@ -624,10 +625,11 @@ async def test_fan_ikea( # set invalid preset_mode from HA cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await async_set_preset_mode( hass, entity_id, preset_mode="invalid does not exist" ) + assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 # test adding new fan to the network and HA @@ -813,8 +815,9 @@ async def test_fan_kof( # set invalid preset_mode from HA cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: 
await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) + assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 # test adding new fan to the network and HA diff --git a/tests/components/zha/test_gateway.py b/tests/components/zha/test_gateway.py index 2a0a241c864..4f520920704 100644 --- a/tests/components/zha/test_gateway.py +++ b/tests/components/zha/test_gateway.py @@ -4,22 +4,21 @@ from unittest.mock import MagicMock, patch import pytest from zigpy.application import ControllerApplication -import zigpy.exceptions import zigpy.profiles.zha as zha import zigpy.zcl.clusters.general as general import zigpy.zcl.clusters.lighting as lighting -from homeassistant.components.zha.core.const import RadioType -from homeassistant.components.zha.core.device import ZHADevice +from homeassistant.components.zha.core.gateway import ZHAGateway from homeassistant.components.zha.core.group import GroupMember from homeassistant.components.zha.core.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from .common import async_find_group_entity_id from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.common import MockConfigEntry + IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" @@ -224,101 +223,6 @@ async def test_gateway_create_group_with_id( assert zha_group.group_id == 0x1234 -@patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices", - MagicMock(), -) -@patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_groups", - MagicMock(), -) -@patch("homeassistant.components.zha.core.gateway.STARTUP_FAILURE_DELAY_S", 0.01) -@pytest.mark.parametrize( - "startup_effect", - [ - [asyncio.TimeoutError(), FileNotFoundError(), None], - [asyncio.TimeoutError(), None], 
- [None], - ], -) -async def test_gateway_initialize_success( - startup_effect: list[Exception | None], - hass: HomeAssistant, - device_light_1: ZHADevice, - coordinator: ZHADevice, - zigpy_app_controller: ControllerApplication, -) -> None: - """Test ZHA initializing the gateway successfully.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - - zigpy_app_controller.startup.side_effect = startup_effect - zigpy_app_controller.startup.reset_mock() - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ): - await zha_gateway.async_initialize() - - assert zigpy_app_controller.startup.call_count == len(startup_effect) - device_light_1.async_cleanup_handles() - - -@patch("homeassistant.components.zha.core.gateway.STARTUP_FAILURE_DELAY_S", 0.01) -async def test_gateway_initialize_failure( - hass: HomeAssistant, - device_light_1: ZHADevice, - coordinator: ZHADevice, - zigpy_app_controller: ControllerApplication, -) -> None: - """Test ZHA failing to initialize the gateway.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - - zigpy_app_controller.startup.side_effect = [ - asyncio.TimeoutError(), - RuntimeError(), - FileNotFoundError(), - ] - zigpy_app_controller.startup.reset_mock() - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ), pytest.raises(FileNotFoundError): - await zha_gateway.async_initialize() - - assert zigpy_app_controller.startup.call_count == 3 - - -@patch("homeassistant.components.zha.core.gateway.STARTUP_FAILURE_DELAY_S", 0.01) -async def test_gateway_initialize_failure_transient( - hass: HomeAssistant, - device_light_1: ZHADevice, - coordinator: ZHADevice, - zigpy_app_controller: ControllerApplication, -) -> None: - """Test ZHA failing to initialize the gateway but with a transient error.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - - 
zigpy_app_controller.startup.side_effect = [ - RuntimeError(), - zigpy.exceptions.TransientConnectionError(), - ] - zigpy_app_controller.startup.reset_mock() - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ), pytest.raises(ConfigEntryNotReady): - await zha_gateway.async_initialize() - - # Initialization immediately stops and is retried after TransientConnectionError - assert zigpy_app_controller.startup.call_count == 2 - - @patch( "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices", MagicMock(), @@ -340,22 +244,25 @@ async def test_gateway_initialize_bellows_thread( thread_state: bool, config_override: dict, hass: HomeAssistant, - coordinator: ZHADevice, zigpy_app_controller: ControllerApplication, + config_entry: MockConfigEntry, ) -> None: """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None + config_entry.data = dict(config_entry.data) + config_entry.data["device"]["path"] = device_path + config_entry.add_to_hass(hass) - zha_gateway.config_entry.data = dict(zha_gateway.config_entry.data) - zha_gateway.config_entry.data["device"]["path"] = device_path - zha_gateway._config.setdefault("zigpy_config", {}).update(config_override) + zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) - await zha_gateway.async_initialize() + with patch( + "bellows.zigbee.application.ControllerApplication.new", + return_value=zigpy_app_controller, + ) as mock_new: + await zha_gateway.async_initialize() - RadioType.ezsp.controller.new.mock_calls[-1].kwargs["config"][ - "use_thread" - ] is thread_state + mock_new.mock_calls[-1].kwargs["config"]["use_thread"] is thread_state + + await zha_gateway.shutdown() @pytest.mark.parametrize( @@ -373,15 +280,14 @@ async def test_gateway_force_multi_pan_channel( config_override: dict, expected_channel: int | None, hass: HomeAssistant, - 
coordinator, + config_entry: MockConfigEntry, ) -> None: """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None + config_entry.data = dict(config_entry.data) + config_entry.data["device"]["path"] = device_path + config_entry.add_to_hass(hass) - zha_gateway.config_entry.data = dict(zha_gateway.config_entry.data) - zha_gateway.config_entry.data["device"]["path"] = device_path - zha_gateway._config.setdefault("zigpy_config", {}).update(config_override) + zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) _, config = zha_gateway.get_application_controller_data() assert config["network"]["channel"] == expected_channel diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index ad6ab4e351e..c2e9469c239 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -1,5 +1,6 @@ """Tests for ZHA integration init.""" import asyncio +import typing from unittest.mock import AsyncMock, Mock, patch import pytest @@ -9,6 +10,7 @@ from zigpy.exceptions import TransientConnectionError from homeassistant.components.zha.core.const import ( CONF_BAUDRATE, + CONF_FLOW_CONTROL, CONF_RADIO_TYPE, CONF_USB_PATH, DOMAIN, @@ -61,9 +63,8 @@ async def test_migration_from_v1_no_baudrate( assert config_entry_v1.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE assert CONF_DEVICE in config_entry_v1.data assert config_entry_v1.data[CONF_DEVICE][CONF_DEVICE_PATH] == DATA_PORT_PATH - assert CONF_BAUDRATE not in config_entry_v1.data[CONF_DEVICE] assert CONF_USB_PATH not in config_entry_v1.data - assert config_entry_v1.version == 3 + assert config_entry_v1.version == 4 @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @@ -80,7 +81,7 @@ async def test_migration_from_v1_with_baudrate( assert CONF_USB_PATH not in config_entry_v1.data assert CONF_BAUDRATE in config_entry_v1.data[CONF_DEVICE] assert 
config_entry_v1.data[CONF_DEVICE][CONF_BAUDRATE] == 115200 - assert config_entry_v1.version == 3 + assert config_entry_v1.version == 4 @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @@ -95,8 +96,7 @@ async def test_migration_from_v1_wrong_baudrate( assert CONF_DEVICE in config_entry_v1.data assert config_entry_v1.data[CONF_DEVICE][CONF_DEVICE_PATH] == DATA_PORT_PATH assert CONF_USB_PATH not in config_entry_v1.data - assert CONF_BAUDRATE not in config_entry_v1.data[CONF_DEVICE] - assert config_entry_v1.version == 3 + assert config_entry_v1.version == 4 @pytest.mark.skipif( @@ -149,23 +149,74 @@ async def test_setup_with_v3_cleaning_uri( mock_zigpy_connect: ControllerApplication, ) -> None: """Test migration of config entry from v3, applying corrections to the port path.""" - config_entry_v3 = MockConfigEntry( + config_entry_v4 = MockConfigEntry( domain=DOMAIN, data={ CONF_RADIO_TYPE: DATA_RADIO_TYPE, - CONF_DEVICE: {CONF_DEVICE_PATH: path, CONF_BAUDRATE: 115200}, + CONF_DEVICE: { + CONF_DEVICE_PATH: path, + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, + }, }, - version=3, + version=4, ) - config_entry_v3.add_to_hass(hass) + config_entry_v4.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry_v3.entry_id) + await hass.config_entries.async_setup(config_entry_v4.entry_id) await hass.async_block_till_done() - await hass.config_entries.async_unload(config_entry_v3.entry_id) + await hass.config_entries.async_unload(config_entry_v4.entry_id) - assert config_entry_v3.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE - assert config_entry_v3.data[CONF_DEVICE][CONF_DEVICE_PATH] == cleaned_path - assert config_entry_v3.version == 3 + assert config_entry_v4.data[CONF_RADIO_TYPE] == DATA_RADIO_TYPE + assert config_entry_v4.data[CONF_DEVICE][CONF_DEVICE_PATH] == cleaned_path + assert config_entry_v4.version == 4 + + +@pytest.mark.parametrize( + ( + "radio_type", + "old_baudrate", + "old_flow_control", + "new_baudrate", + 
"new_flow_control", + ), + [ + ("znp", None, None, 115200, None), + ("znp", None, "software", 115200, "software"), + ("znp", 57600, "software", 57600, "software"), + ("deconz", None, None, 38400, None), + ("deconz", 115200, None, 115200, None), + ], +) +@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) +async def test_migration_baudrate_and_flow_control( + radio_type: str, + old_baudrate: int, + old_flow_control: typing.Literal["hardware", "software", None], + new_baudrate: int, + new_flow_control: typing.Literal["hardware", "software", None], + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test baudrate and flow control migration.""" + config_entry.data = { + **config_entry.data, + CONF_RADIO_TYPE: radio_type, + CONF_DEVICE: { + CONF_BAUDRATE: old_baudrate, + CONF_FLOW_CONTROL: old_flow_control, + CONF_DEVICE_PATH: "/dev/null", + }, + } + config_entry.version = 3 + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.version > 3 + assert config_entry.data[CONF_DEVICE][CONF_BAUDRATE] == new_baudrate + assert config_entry.data[CONF_DEVICE][CONF_FLOW_CONTROL] == new_flow_control @patch( diff --git a/tests/components/zha/test_light.py b/tests/components/zha/test_light.py index 1ec70b74735..bd799187a19 100644 --- a/tests/components/zha/test_light.py +++ b/tests/components/zha/test_light.py @@ -40,7 +40,10 @@ from .common import ( ) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import async_fire_time_changed +from tests.common import ( + async_fire_time_changed, + async_mock_load_restore_state_from_storage, +) IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9" @@ -1921,3 +1924,76 @@ async def test_group_member_assume_state( await zha_gateway.async_remove_zigpy_group(zha_group.group_id) assert 
hass.states.get(group_entity_id) is None assert entity_registry.async_get(group_entity_id) is None + + +@pytest.mark.parametrize( + ("restored_state", "expected_state"), + [ + ( + STATE_ON, + { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": ColorMode.XY, # color_mode defaults to what the light supports when restored with ON state + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + }, + ), + ( + STATE_OFF, + { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": None, + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + }, + ), + ], +) +async def test_restore_light_state( + hass: HomeAssistant, + zigpy_device_mock, + core_rs, + zha_device_restored, + restored_state, + expected_state, +) -> None: + """Test ZHA light restores without throwing an error when attributes are None.""" + + # restore state with None values + attributes = { + "brightness": None, + "off_with_transition": None, + "off_brightness": None, + "color_mode": None, + "color_temp": None, + "xy_color": None, + "hs_color": None, + "effect": None, + } + + entity_id = "light.fakemanufacturer_fakemodel_light" + core_rs( + entity_id, + state=restored_state, + attributes=attributes, + ) + await async_mock_load_restore_state_from_storage(hass) + + zigpy_device = zigpy_device_mock(LIGHT_COLOR) + zha_device = await zha_device_restored(zigpy_device) + entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + + assert entity_id is not None + assert hass.states.get(entity_id).state == restored_state + + # compare actual restored state to expected state + for attribute, expected_value in expected_state.items(): + assert hass.states.get(entity_id).attributes.get(attribute) == expected_value diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index 9c79578843c..d168e2e57b1 100644 --- a/tests/components/zha/test_repairs.py +++ 
b/tests/components/zha/test_repairs.py @@ -95,6 +95,7 @@ def test_detect_radio_hardware_failure(hass: HomeAssistant) -> None: assert _detect_radio_hardware(hass, SKYCONNECT_DEVICE) == HardwareType.OTHER +@patch("homeassistant.components.zha.STARTUP_RETRIES", new=1) @pytest.mark.parametrize( ("detected_hardware", "expected_learn_more_url"), [ @@ -188,6 +189,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( assert issue is None +@patch("homeassistant.components.zha.STARTUP_RETRIES", new=1) async def test_multipan_firmware_retry_on_probe_ezsp( hass: HomeAssistant, config_entry: MockConfigEntry, @@ -312,6 +314,8 @@ async def test_inconsistent_settings_keep_new( data = await resp.json() assert data["type"] == "create_entry" + await hass.config_entries.async_unload(config_entry.entry_id) + assert ( issue_registry.async_get_issue( domain=DOMAIN, @@ -388,6 +392,8 @@ async def test_inconsistent_settings_restore_old( data = await resp.json() assert data["type"] == "create_entry" + await hass.config_entries.async_unload(config_entry.entry_id) + assert ( issue_registry.async_get_issue( domain=DOMAIN, diff --git a/tests/components/zha/test_websocket_api.py b/tests/components/zha/test_websocket_api.py index d914c88c0c2..44006ea6ca1 100644 --- a/tests/components/zha/test_websocket_api.py +++ b/tests/components/zha/test_websocket_api.py @@ -62,7 +62,7 @@ from .conftest import ( ) from .data import BASE_CUSTOM_CONFIGURATION, CONFIG_WITH_ALARM_OPTIONS -from tests.common import MockUser +from tests.common import MockConfigEntry, MockUser IEEE_SWITCH_DEVICE = "01:2d:6f:00:0a:90:69:e7" IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" @@ -295,10 +295,12 @@ async def test_get_zha_config_with_alarm( async def test_update_zha_config( - zha_client, app_controller: ControllerApplication + hass: HomeAssistant, + config_entry: MockConfigEntry, + zha_client, + app_controller: ControllerApplication, ) -> None: """Test updating ZHA custom configuration.""" - configuration: dict = 
deepcopy(CONFIG_WITH_ALARM_OPTIONS) configuration["data"]["zha_options"]["default_light_transition"] = 10 @@ -312,10 +314,12 @@ async def test_update_zha_config( msg = await zha_client.receive_json() assert msg["success"] - await zha_client.send_json({ID: 6, TYPE: "zha/configuration"}) - msg = await zha_client.receive_json() - configuration = msg["result"] - assert configuration == configuration + await zha_client.send_json({ID: 6, TYPE: "zha/configuration"}) + msg = await zha_client.receive_json() + configuration = msg["result"] + assert configuration == configuration + + await hass.config_entries.async_unload(config_entry.entry_id) async def test_device_not_found(zha_client) -> None: diff --git a/tests/components/zha/zha_devices_list.py b/tests/components/zha/zha_devices_list.py index 44f01555b19..65ef55c4711 100644 --- a/tests/components/zha/zha_devices_list.py +++ b/tests/components/zha/zha_devices_list.py @@ -1492,7 +1492,7 @@ DEVICES = [ DEV_SIG_ENT_MAP: { ("light", "00:11:22:33:44:55:66:77-1"): { DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", DEV_SIG_ENT_MAP_ID: "light.jasco_products_45852_light", }, ("button", "00:11:22:33:44:55:66:77-1-3"): { @@ -1547,7 +1547,7 @@ DEVICES = [ DEV_SIG_ENT_MAP: { ("light", "00:11:22:33:44:55:66:77-1"): { DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", DEV_SIG_ENT_MAP_ID: "light.jasco_products_45856_light", }, ("button", "00:11:22:33:44:55:66:77-1-3"): { @@ -1602,7 +1602,7 @@ DEVICES = [ DEV_SIG_ENT_MAP: { ("light", "00:11:22:33:44:55:66:77-1"): { DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", + DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", DEV_SIG_ENT_MAP_ID: "light.jasco_products_45857_light", }, ("button", "00:11:22:33:44:55:66:77-1-3"): { @@ -2178,6 +2178,16 @@ DEVICES = [ DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", DEV_SIG_ENT_MAP_ID: 
"sensor.lumi_lumi_relay_c2acn01_power_factor", }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_summation_delivered", + }, + ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { + DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], + DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", + DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_instantaneous_demand", + }, ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { DEV_SIG_CLUSTER_HANDLERS: ["basic"], DEV_SIG_ENT_MAP_CLASS: "RSSISensor", diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 5a424b38c5b..f2c3abd362a 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -385,6 +385,12 @@ def climate_eurotronic_spirit_z_state_fixture(): return json.loads(load_fixture("zwave_js/climate_eurotronic_spirit_z_state.json")) +@pytest.fixture(name="climate_heatit_z_trm6_state", scope="session") +def climate_heatit_z_trm6_state_fixture(): + """Load the climate HEATIT Z-TRM6 thermostat node state fixture data.""" + return json.loads(load_fixture("zwave_js/climate_heatit_z_trm6_state.json")) + + @pytest.fixture(name="climate_heatit_z_trm3_state", scope="session") def climate_heatit_z_trm3_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node state fixture data.""" @@ -897,6 +903,14 @@ def climate_eurotronic_spirit_z_fixture(client, climate_eurotronic_spirit_z_stat return node +@pytest.fixture(name="climate_heatit_z_trm6") +def climate_heatit_z_trm6_fixture(client, climate_heatit_z_trm6_state): + """Mock a climate radio HEATIT Z-TRM6 node.""" + node = Node(client, copy.deepcopy(climate_heatit_z_trm6_state)) + client.driver.controller.nodes[node.node_id] = node + return node + + @pytest.fixture(name="climate_heatit_z_trm3_no_value") def 
climate_heatit_z_trm3_no_value_fixture( client, climate_heatit_z_trm3_no_value_state diff --git a/tests/components/zwave_js/fixtures/climate_heatit_z_trm6_state.json b/tests/components/zwave_js/fixtures/climate_heatit_z_trm6_state.json new file mode 100644 index 00000000000..ffc7b25fda4 --- /dev/null +++ b/tests/components/zwave_js/fixtures/climate_heatit_z_trm6_state.json @@ -0,0 +1,2120 @@ +{ + "nodeId": 101, + "index": 0, + "installerIcon": 4608, + "userIcon": 4609, + "status": 4, + "ready": true, + "isListening": true, + "isRouting": true, + "isSecure": true, + "manufacturerId": 411, + "productId": 12289, + "productType": 48, + "firmwareVersion": "1.0.6", + "zwavePlusVersion": 2, + "location": "**REDACTED**", + "deviceConfig": { + "filename": "/data/db/devices/0x019b/z-trm6.json", + "isEmbedded": true, + "manufacturer": "Heatit", + "manufacturerId": 411, + "label": "Z-TRM6", + "description": "Floor Thermostat", + "devices": [ + { + "productType": 48, + "productId": 12289 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "associations": {}, + "paramInformation": { + "_map": {} + }, + "compat": { + "overrideFloatEncoding": { + "size": 2 + } + }, + "metadata": { + "inclusion": "Add\nThe primary controller/gateway has a mode for adding devices. Please refer to your primary controller manual on how to set the primary controller in add mode. The device may only be added to the network if the primary controller is in add mode.\nAn always listening node must be powered continuously and reside in a fixed position in the installation to secure the routing table. Adding the device within a 2 meter range from the gateway can minimize faults during the Interview process.\n\nStandard (Manual)\nAdd mode is indicated on the device by rotating LED segments on the display. It indicates this for 90 seconds until a timeout occurs, or until the device has been added to the network. 
Configuration mode can also be cancelled by performing the same procedure used for starting\nConfiguration mode.\n1. Hold the Center button for 5 seconds.\nThe display will show \u201cOFF\u201d.\n2. Press the \u201d+\u201d button once to see \u201cCON\u201d in the display.\n3. Start the add device process in your primary controller.\n4. Start the configuration mode on the thermostat by holding the Center button for approximately 2 seconds.\n\nThe device is now ready for use with default settings.\nIf inclusion fails, please perform a \u201dremove device\u201d process and try again. If inclusion fails again, please see \u201cFactory reset\u201d", + "exclusion": "Remove\nThe primary controller/gateway has a mode for removing devices. Please refer to your primary controller manual on how to set the primary controller in remove mode. The device may only be removed from the network if the primary controller is in remove mode.\nWhen the device is removed from the network, it will NOT revert to factory settings.\n\nStandard (Manual)\nRemove mode is indicated on the device by rotating LED segments on the display. It indicates this for 90 seconds until a timeout occurs, or until the device has been removed from the network. Configuration mode can also be cancelled by performing the same procedure used for starting\nConfiguration mode.\n1. Hold the Center button for 5 seconds.\nThe display will show \u201cOFF\u201d.\n2. Press the \u201d+\u201d button once to see \u201cCON\u201d in the display.\n3. Start the remove device process in your primary controller.\n4. Start the configuration mode on the thermostat by holding the Center button for approximately 2 seconds.\n\nNB! When the device is removed from the gateway, the parameters are not reset. To reset the parameters, see Chapter \u201dFactory reset\u201d", + "reset": "Enter the menu by holding the Center button for about 5 seconds, navigate in the menu with the \u201d+\u201d button til you see FACT. 
Press the Center button until you see \u201c-- --\u201d blinking in the display, then hold for about 5 seconds to perform a reset.\nYou may also initiate a reset by holding the Right and Center buttons for 60 seconds.\n\nWhen either of these procedures has been performed, the thermostat will perform a complete factory reset. The device will display \u201cRES\u201d for 5 seconds while performing a factory reset. When \u201cRES\u201d is no longer displayed, the thermostat has been reset.\n\nPlease use this procedure only when the network primary controller is missing or otherwise inoperable", + "manual": "https://media.heatit.com/2926" + } + }, + "label": "Z-TRM6", + "endpointCountIsDynamic": false, + "endpointsHaveIdenticalCapabilities": false, + "individualEndpointCount": 4, + "aggregatedEndpointCount": 0, + "interviewAttempts": 1, + "isFrequentListening": false, + "maxDataRate": 100000, + "supportedDataRates": [40000, 100000], + "protocolVersion": 3, + "supportsBeaming": true, + "supportsSecurity": false, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 5, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 8, + "label": "Thermostat" + }, + "specific": { + "key": 6, + "label": "General Thermostat V2" + }, + "mandatorySupportedCCs": [32, 114, 64, 67, 134], + "mandatoryControlledCCs": [] + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x019b:0x0030:0x3001:1.0.6", + "statistics": { + "commandsTX": 268, + "commandsRX": 399, + "commandsDroppedRX": 0, + "commandsDroppedTX": 0, + "timeoutResponse": 4, + "lastSeen": "2023-11-20T16:45:28.117Z", + "lwr": { + "protocolDataRate": 3, + "repeaters": [], + "rssi": -51, + "repeaterRSSI": [] + }, + "rtt": 32.4, + "rssi": -50 + }, + "highestSecurityClass": 1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2023-11-20T16:45:28.117Z", + "values": [ + { + "endpoint": 0, + "commandClass": 112, + 
"commandClassName": "Configuration", + "property": 1, + "propertyName": "Local Control", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Local Control", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Enable", + "1": "Disable" + }, + "valueSize": 1, + "format": 1, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 2, + "propertyName": "Sensor Mode", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Sensor Mode", + "default": 1, + "min": 0, + "max": 5, + "states": { + "0": "Floor", + "1": "Internal", + "2": "Internal with floor limit", + "3": "External", + "4": "External with floor limit", + "5": "Power regulator" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 3, + "propertyName": "External Sensor Resistance", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "External Sensor Resistance", + "default": 0, + "min": 0, + "max": 7, + "states": { + "0": "10", + "1": "12", + "2": "15", + "3": "22", + "4": "33", + "5": "47", + "6": "6.8", + "7": "100" + }, + "unit": "k\u03a9", + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Internal Sensor Min Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Internal Sensor Min Temp Limit", + "default": 50, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + 
"isFromConfig": true + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 5, + "propertyName": "Floor Sensor Min Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Floor Sensor Min Temp Limit", + "default": 50, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 6, + "propertyName": "External Sensor Min Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "External Sensor Min Temp Limit", + "default": 50, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 50 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 7, + "propertyName": "Internal Sensor Max Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Internal Sensor Max Temp Limit", + "default": 400, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 400 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Floor Sensor Max Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Floor Sensor Max Temp Limit", + "default": 400, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 400 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + 
"property": 9, + "propertyName": "External Sensor Max Temp Limit", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "External Sensor Max Temp Limit", + "default": 400, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 400 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 10, + "propertyName": "Internal Sensor Calibration", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Internal Sensor Calibration", + "default": 0, + "min": -60, + "max": 60, + "unit": "0.1 \u00b0C", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 11, + "propertyName": "Floor Sensor Calibration", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Floor Sensor Calibration", + "default": 0, + "min": -60, + "max": 60, + "unit": "0.1 \u00b0C", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 12, + "propertyName": "External Sensor Calibration", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "External Sensor Calibration", + "default": 0, + "min": -60, + "max": 60, + "unit": "0.1 \u00b0C", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 13, + "propertyName": "Regulation Mode", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, 
+ "label": "Regulation Mode", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Hysteresis", + "1": "PWM" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 14, + "propertyName": "Temperature Control Hysteresis", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Temperature Control Hysteresis", + "default": 5, + "min": 3, + "max": 30, + "unit": "0.1 \u00b0C", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 5 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 15, + "propertyName": "Temperature Display", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Temperature Display", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Setpoint", + "1": "Measured" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 16, + "propertyName": "Active Display Brightness", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Active Display Brightness", + "default": 10, + "min": 1, + "max": 10, + "unit": "10 %", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 17, + "propertyName": "Standby Display Brightness", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Standby Display Brightness", + "default": 5, + "min": 1, + "max": 10, + "unit": "10 %", + "valueSize": 1, + "format": 0, 
+ "allowManualEntry": true, + "isFromConfig": true + }, + "value": 5 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 18, + "propertyName": "Temperature Report Interval", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Temperature Report Interval", + "default": 840, + "min": 30, + "max": 65535, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 840 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 19, + "propertyName": "Temperature Report Hysteresis", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Temperature Report Hysteresis", + "default": 10, + "min": 1, + "max": 100, + "unit": "0.1 \u00b0C", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 20, + "propertyName": "Meter Report Interval", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Meter Report Interval", + "default": 840, + "min": 30, + "max": 65535, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 840 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 21, + "propertyName": "Turn On Delay After Error", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Turn On Delay After Error", + "default": 0, + "min": 0, + "max": 65535, + "states": { + "0": "Stay off (Display error)" + }, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + 
"commandClass": 112, + "commandClassName": "Configuration", + "property": 22, + "propertyName": "Heating Setpoint", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Heating Setpoint", + "default": 210, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 190 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 23, + "propertyName": "Cooling Setpoint", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Cooling Setpoint", + "default": 180, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 180 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 24, + "propertyName": "Eco Setpoint", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Eco Setpoint", + "default": 180, + "min": 50, + "max": 400, + "unit": "0.1 \u00b0C", + "valueSize": 2, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 180 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 25, + "propertyName": "Power Regulator Active Time", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Power Regulator Active Time", + "default": 2, + "min": 1, + "max": 10, + "unit": "10 %", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 6 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 26, + "propertyName": "Thermostat State Report Interval", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": 
true, + "label": "Thermostat State Report Interval", + "default": 43200, + "min": 0, + "max": 65535, + "states": { + "0": "Changes only" + }, + "unit": "seconds", + "valueSize": 2, + "format": 1, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 43200 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 27, + "propertyName": "Operating Mode", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Operating Mode", + "default": 1, + "min": 0, + "max": 3, + "states": { + "0": "Off", + "1": "Heating", + "2": "Cooling", + "3": "Eco" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 28, + "propertyName": "Open Window Detection", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Open Window Detection", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Disabled", + "1": "Enabled" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 29, + "propertyName": "Load Power", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Load Power", + "default": 0, + "min": 0, + "max": 99, + "states": { + "0": "Use measured value" + }, + "unit": "100 W", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + 
"min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 411 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 48 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 12289 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.18" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 3, + "metadata": { + "type": "string[]", + "readable": true, + 
"writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + "value": ["1.0", "2.5"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "sdkVersion", + "propertyName": "sdkVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "SDK version", + "stateful": true, + "secret": false + }, + "value": "7.18.1" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkAPIVersion", + "propertyName": "applicationFrameworkAPIVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API version", + "stateful": true, + "secret": false + }, + "value": "10.18.1" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkBuildNumber", + "propertyName": "applicationFrameworkBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API build number", + "stateful": true, + "secret": false + }, + "value": 273 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hostInterfaceVersion", + "propertyName": "hostInterfaceVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API version", + "stateful": true, + "secret": false + }, + "value": "unused" + }, + { + "endpoint": 0, + "commandClass": 
134, + "commandClassName": "Version", + "property": "hostInterfaceBuildNumber", + "propertyName": "hostInterfaceBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API build number", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolVersion", + "propertyName": "zWaveProtocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.18.1" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolBuildNumber", + "propertyName": "zWaveProtocolBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol build number", + "stateful": true, + "secret": false + }, + "value": 273 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationVersion", + "propertyName": "applicationVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application version", + "stateful": true, + "secret": false + }, + "value": "1.0.6" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationBuildNumber", + "propertyName": "applicationBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application build number", + "stateful": true, + "secret": false + }, + "value": 273 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 3, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: Duration", + "ccVersion": 3, + "metadata": { + "type": 
"number", + "readable": true, + "writeable": true, + "description": "Sets the duration of an on/off period in 1/10th seconds. Must be set together with \"On/Off Cycle Count\"", + "label": "Node Identify - On/Off Period: Duration", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 3 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 4, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Cycle Count", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Sets the number of on/off periods. 0xff means infinite. Must be set together with \"On/Off Period duration\"", + "label": "Node Identify - On/Off Cycle Count", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 4 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 5, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: On time", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "This property is used to set the length of the On time during an On/Off period. It allows asymmetric On/Off periods. 
The value 0x00 MUST represent symmetric On/Off period (On time equal to Off time)", + "label": "Node Identify - On/Off Period: On time", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 5 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "value", + "propertyName": "value", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Indicator value", + "ccSpecific": { + "indicatorId": 0 + }, + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "identify", + "propertyName": "identify", + "ccVersion": 3, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Identify", + "states": { + "true": "Identify" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "timeout", + "propertyName": "timeout", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": true, + "label": "Timeout", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 1, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 65537, + "propertyName": "value", + "propertyKeyName": "Electric_kWh_Consumed", + "ccVersion": 5, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [kWh]", + "ccSpecific": { + "meterType": 1, + "scale": 0, + "rateType": 1 + }, + "unit": "kWh", + "stateful": true, + "secret": false + }, + "value": 0, + "nodeId": 101 + }, + { + "endpoint": 1, + "commandClass": 50, + "commandClassName": "Meter", + "property": "value", + "propertyKey": 66049, + "propertyName": "value", + "propertyKeyName": "Electric_W_Consumed", + "ccVersion": 5, + "metadata": { + 
"type": "number", + "readable": true, + "writeable": false, + "label": "Electric Consumption [W]", + "ccSpecific": { + "meterType": 1, + "scale": 2, + "rateType": 1 + }, + "unit": "W", + "stateful": true, + "secret": false + }, + "value": 0, + "nodeId": 101 + }, + { + "endpoint": 1, + "commandClass": 50, + "commandClassName": "Meter", + "property": "reset", + "propertyName": "reset", + "ccVersion": 5, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Reset accumulated values", + "states": { + "true": "Reset" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 1, + "commandClass": 64, + "commandClassName": "Thermostat Mode", + "property": "mode", + "propertyName": "mode", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Thermostat mode", + "min": 0, + "max": 255, + "states": { + "0": "Off", + "1": "Heat", + "2": "Cool", + "11": "Energy heat" + }, + "stateful": true, + "secret": false + }, + "value": 1 + }, + { + "endpoint": 1, + "commandClass": 64, + "commandClassName": "Thermostat Mode", + "property": "manufacturerData", + "propertyName": "manufacturerData", + "ccVersion": 3, + "metadata": { + "type": "buffer", + "readable": true, + "writeable": false, + "label": "Manufacturer data", + "stateful": true, + "secret": false + } + }, + { + "endpoint": 1, + "commandClass": 66, + "commandClassName": "Thermostat Operating State", + "property": "state", + "propertyName": "state", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Operating state", + "min": 0, + "max": 255, + "states": { + "0": "Idle", + "1": "Heating", + "2": "Cooling", + "3": "Fan Only", + "4": "Pending Heat", + "5": "Pending Cool", + "6": "Vent/Economizer", + "7": "Aux Heating", + "8": "2nd Stage Heating", + "9": "2nd Stage Cooling", + "10": "2nd Stage Aux Heat", + "11": "3rd Stage Aux Heat" + }, + "stateful": true, + "secret": false 
+ }, + "value": 0 + }, + { + "endpoint": 1, + "commandClass": 67, + "commandClassName": "Thermostat Setpoint", + "property": "setpoint", + "propertyKey": 1, + "propertyName": "setpoint", + "propertyKeyName": "Heating", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Setpoint (Heating)", + "ccSpecific": { + "setpointType": 1 + }, + "min": 5, + "max": 40, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 19 + }, + { + "endpoint": 1, + "commandClass": 67, + "commandClassName": "Thermostat Setpoint", + "property": "setpoint", + "propertyKey": 2, + "propertyName": "setpoint", + "propertyKeyName": "Cooling", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Setpoint (Cooling)", + "ccSpecific": { + "setpointType": 2 + }, + "min": 5, + "max": 40, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 18 + }, + { + "endpoint": 1, + "commandClass": 67, + "commandClassName": "Thermostat Setpoint", + "property": "setpoint", + "propertyKey": 11, + "propertyName": "setpoint", + "propertyKeyName": "Energy Save Heating", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Setpoint (Energy Save Heating)", + "ccSpecific": { + "setpointType": 11 + }, + "min": 5, + "max": 40, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 18 + }, + { + "endpoint": 1, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Heat Alarm", + "propertyKey": "Heat sensor status", + "propertyName": "Heat Alarm", + "propertyKeyName": "Heat sensor status", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Heat sensor status", + "ccSpecific": { + "notificationType": 4 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "2": "Overheat detected" + }, + "stateful": true, + "secret": false 
+ }, + "value": 0 + }, + { + "endpoint": 1, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Over-load status", + "propertyName": "Power Management", + "propertyKeyName": "Over-load status", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Over-load status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "0": "idle", + "8": "Over-load detected" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 1, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 1, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 1, + "commandClass": 117, + "commandClassName": "Protection", + "property": "local", + "propertyName": "local", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Local protection state", + "states": { + "0": "Unprotected", + "1": "ProtectedBySequence", + "2": "NoOperationPossible" + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 2, + "commandClass": 49, + "commandClassName": "Multilevel Sensor", + "property": "Air temperature", + "propertyName": "Air temperature", + "ccVersion": 11, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Air temperature", + "ccSpecific": { + "sensorType": 1, + 
"scale": 0 + }, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 22.5, + "nodeId": 101 + }, + { + "endpoint": 3, + "commandClass": 49, + "commandClassName": "Multilevel Sensor", + "property": "Air temperature", + "propertyName": "Air temperature", + "ccVersion": 11, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Air temperature", + "ccSpecific": { + "sensorType": 1, + "scale": 0 + }, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 4, + "commandClass": 49, + "commandClassName": "Multilevel Sensor", + "property": "Air temperature", + "propertyName": "Air temperature", + "ccVersion": 11, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Air temperature", + "ccSpecific": { + "sensorType": 1, + "scale": 0 + }, + "unit": "\u00b0C", + "stateful": true, + "secret": false + }, + "value": 21.9, + "nodeId": 101 + } + ], + "endpoints": [ + { + "nodeId": 101, + "index": 0, + "installerIcon": 4608, + "userIcon": 4609, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 8, + "label": "Thermostat" + }, + "specific": { + "key": 6, + "label": "General Thermostat V2" + }, + "mandatorySupportedCCs": [32, 114, 64, 67, 134], + "mandatoryControlledCCs": [] + }, + "commandClasses": [ + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": true + }, + { + "id": 64, + "name": "Thermostat Mode", + "version": 3, + "isSecure": true + }, + { + "id": 67, + "name": "Thermostat Setpoint", + "version": 3, + "isSecure": true + }, + { + "id": 134, + "name": "Version", + "version": 3, + "isSecure": true + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 85, + "name": "Transport Service", + "version": 2, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 
159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 112, + "name": "Configuration", + "version": 4, + "isSecure": true + }, + { + "id": 66, + "name": "Thermostat Operating State", + "version": 1, + "isSecure": true + }, + { + "id": 50, + "name": "Meter", + "version": 5, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": true + }, + { + "id": 117, + "name": "Protection", + "version": 1, + "isSecure": true + }, + { + "id": 49, + "name": "Multilevel Sensor", + "version": 11, + "isSecure": true + }, + { + "id": 96, + "name": "Multi Channel", + "version": 4, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + }, + { + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": true + }, + { + "id": 135, + "name": "Indicator", + "version": 3, + "isSecure": true + }, + { + "id": 115, + "name": "Powerlevel", + "version": 1, + "isSecure": true + }, + { + "id": 122, + "name": "Firmware Update Meta Data", + "version": 5, + "isSecure": true + } + ] + }, + { + "nodeId": 101, + "index": 1, + "installerIcon": 4608, + "userIcon": 4609, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 8, + "label": "Thermostat" + }, + "specific": { + "key": 6, + "label": "General Thermostat V2" + }, + "mandatorySupportedCCs": [32, 114, 64, 67, 134], + "mandatoryControlledCCs": [] + }, + "commandClasses": [ + { + "id": 64, + "name": "Thermostat Mode", + "version": 3, + "isSecure": true + }, + { + "id": 67, + "name": "Thermostat Setpoint", + "version": 3, + "isSecure": true + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, 
+ "isSecure": false + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + }, + { + "id": 66, + "name": "Thermostat Operating State", + "version": 1, + "isSecure": true + }, + { + "id": 117, + "name": "Protection", + "version": 1, + "isSecure": true + }, + { + "id": 50, + "name": "Meter", + "version": 5, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": true + } + ] + }, + { + "nodeId": 101, + "index": 2, + "installerIcon": 3328, + "userIcon": 3329, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 33, + "label": "Multilevel Sensor" + }, + "specific": { + "key": 1, + "label": "Routing Multilevel Sensor" + }, + "mandatorySupportedCCs": [32, 49], + "mandatoryControlledCCs": [] + }, + "commandClasses": [ + { + "id": 49, + "name": "Multilevel Sensor", + "version": 11, + "isSecure": true + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + } + ] + }, + { + "nodeId": 101, 
+ "index": 3, + "installerIcon": 3328, + "userIcon": 3329, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 33, + "label": "Multilevel Sensor" + }, + "specific": { + "key": 1, + "label": "Routing Multilevel Sensor" + }, + "mandatorySupportedCCs": [32, 49], + "mandatoryControlledCCs": [] + }, + "commandClasses": [ + { + "id": 49, + "name": "Multilevel Sensor", + "version": 11, + "isSecure": true + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + } + ] + }, + { + "nodeId": 101, + "index": 4, + "installerIcon": 3328, + "userIcon": 3329, + "deviceClass": { + "basic": { + "key": 4, + "label": "Routing Slave" + }, + "generic": { + "key": 33, + "label": "Multilevel Sensor" + }, + "specific": { + "key": 1, + "label": "Routing Multilevel Sensor" + }, + "mandatorySupportedCCs": [32, 49], + "mandatoryControlledCCs": [] + }, + "commandClasses": [ + { + "id": 49, + "name": "Multilevel Sensor", + "version": 11, + "isSecure": true + }, + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 152, + "name": "Security", + "version": 1, + "isSecure": true + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association 
Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + } + ] + } + ] +} diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 9c4a6339a78..aa20bd3bb84 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -457,7 +457,7 @@ async def test_node_metadata( assert msg["error"]["code"] == ERR_NOT_LOADED -async def test_node_comments( +async def test_node_alerts( hass: HomeAssistant, wallmote_central_scene, integration, @@ -473,13 +473,14 @@ async def test_node_comments( await ws_client.send_json( { ID: 3, - TYPE: "zwave_js/node_comments", + TYPE: "zwave_js/node_alerts", DEVICE_ID: device.id, } ) msg = await ws_client.receive_json() result = msg["result"] assert result["comments"] == [{"level": "info", "text": "test"}] + assert result["is_embedded"] async def test_add_node( diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index e9040dfd397..d5619ff014c 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -415,6 +415,77 @@ async def test_setpoint_thermostat( client.async_send_command_no_wait.reset_mock() +async def test_thermostat_heatit_z_trm6( + hass: HomeAssistant, client, climate_heatit_z_trm6, integration +) -> None: + """Test a heatit Z-TRM6 entity.""" + node = climate_heatit_z_trm6 + state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY) + + assert state + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_HVAC_MODES] == [ + HVACMode.OFF, + HVACMode.HEAT, + HVACMode.COOL, + ] + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.5 + assert state.attributes[ATTR_TEMPERATURE] == 19 + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == ClimateEntityFeature.TARGET_TEMPERATURE | 
ClimateEntityFeature.PRESET_MODE + ) + assert state.attributes[ATTR_MIN_TEMP] == 5 + assert state.attributes[ATTR_MAX_TEMP] == 40 + + # Try switching to external sensor (not connected so defaults to 0) + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": 101, + "args": { + "commandClassName": "Configuration", + "commandClass": 112, + "endpoint": 0, + "property": 2, + "propertyName": "Sensor mode", + "newValue": 4, + "prevValue": 2, + }, + }, + ) + node.receive_event(event) + state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY) + assert state + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 0 + + # Try switching to floor sensor + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": 101, + "args": { + "commandClassName": "Configuration", + "commandClass": 112, + "endpoint": 0, + "property": 2, + "propertyName": "Sensor mode", + "newValue": 0, + "prevValue": 4, + }, + }, + ) + node.receive_event(event) + state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY) + assert state + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.9 + + async def test_thermostat_heatit_z_trm3_no_value( hass: HomeAssistant, client, climate_heatit_z_trm3_no_value, integration ) -> None: diff --git a/tests/components/zwave_js/test_device_trigger.py b/tests/components/zwave_js/test_device_trigger.py index ba0bbbe087d..f9615c84e1d 100644 --- a/tests/components/zwave_js/test_device_trigger.py +++ b/tests/components/zwave_js/test_device_trigger.py @@ -158,15 +158,13 @@ async def test_if_notification_notification_fires( node.receive_event(event) await hass.async_block_till_done() assert len(calls) == 2 - assert calls[0].data[ - "some" - ] == "event.notification.notification - device - zwave_js_notification - {}".format( - CommandClass.NOTIFICATION + assert ( + calls[0].data["some"] + == f"event.notification.notification - device - zwave_js_notification - 
{CommandClass.NOTIFICATION}" ) - assert calls[1].data[ - "some" - ] == "event.notification.notification2 - device - zwave_js_notification - {}".format( - CommandClass.NOTIFICATION + assert ( + calls[1].data["some"] + == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) @@ -288,15 +286,13 @@ async def test_if_entry_control_notification_fires( node.receive_event(event) await hass.async_block_till_done() assert len(calls) == 2 - assert calls[0].data[ - "some" - ] == "event.notification.notification - device - zwave_js_notification - {}".format( - CommandClass.ENTRY_CONTROL + assert ( + calls[0].data["some"] + == f"event.notification.notification - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) - assert calls[1].data[ - "some" - ] == "event.notification.notification2 - device - zwave_js_notification - {}".format( - CommandClass.ENTRY_CONTROL + assert ( + calls[1].data["some"] + == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) @@ -705,15 +701,13 @@ async def test_if_basic_value_notification_fires( node.receive_event(event) await hass.async_block_till_done() assert len(calls) == 2 - assert calls[0].data[ - "some" - ] == "event.value_notification.basic - device - zwave_js_value_notification - {}".format( - CommandClass.BASIC + assert ( + calls[0].data["some"] + == f"event.value_notification.basic - device - zwave_js_value_notification - {CommandClass.BASIC}" ) - assert calls[1].data[ - "some" - ] == "event.value_notification.basic2 - device - zwave_js_value_notification - {}".format( - CommandClass.BASIC + assert ( + calls[1].data["some"] + == f"event.value_notification.basic2 - device - zwave_js_value_notification - {CommandClass.BASIC}" ) @@ -888,15 +882,13 @@ async def test_if_central_scene_value_notification_fires( node.receive_event(event) await hass.async_block_till_done() assert len(calls) == 2 - assert calls[0].data[ - "some" - ] == 
"event.value_notification.central_scene - device - zwave_js_value_notification - {}".format( - CommandClass.CENTRAL_SCENE + assert ( + calls[0].data["some"] + == f"event.value_notification.central_scene - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) - assert calls[1].data[ - "some" - ] == "event.value_notification.central_scene2 - device - zwave_js_value_notification - {}".format( - CommandClass.CENTRAL_SCENE + assert ( + calls[1].data["some"] + == f"event.value_notification.central_scene2 - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) @@ -1064,15 +1056,13 @@ async def test_if_scene_activation_value_notification_fires( node.receive_event(event) await hass.async_block_till_done() assert len(calls) == 2 - assert calls[0].data[ - "some" - ] == "event.value_notification.scene_activation - device - zwave_js_value_notification - {}".format( - CommandClass.SCENE_ACTIVATION + assert ( + calls[0].data["some"] + == f"event.value_notification.scene_activation - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) - assert calls[1].data[ - "some" - ] == "event.value_notification.scene_activation2 - device - zwave_js_value_notification - {}".format( - CommandClass.SCENE_ACTIVATION + assert ( + calls[1].data["some"] + == f"event.value_notification.scene_activation2 - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index cbaa27c2a91..569e36d3b5c 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -87,6 +87,7 @@ async def test_lock_popp_electric_strike_lock_control( hass.states.get("binary_sensor.node_62_the_current_status_of_the_door") is not None ) + assert hass.states.get("select.node_62_current_lock_mode") is not None async def test_fortrez_ssa3_siren( diff --git a/tests/components/zwave_js/test_fan.py 
b/tests/components/zwave_js/test_fan.py index 92141eec3ff..c26a5366d37 100644 --- a/tests/components/zwave_js/test_fan.py +++ b/tests/components/zwave_js/test_fan.py @@ -536,13 +536,14 @@ async def test_inovelli_lzw36( assert args["value"] == 1 client.async_send_command.reset_mock() - with pytest.raises(NotValidPresetModeError): + with pytest.raises(NotValidPresetModeError) as exc: await hass.services.async_call( "fan", "turn_on", {"entity_id": entity_id, "preset_mode": "wheeze"}, blocking=True, ) + assert exc.value.translation_key == "not_valid_preset_mode" assert len(client.async_send_command.call_args_list) == 0 @@ -675,13 +676,14 @@ async def test_thermostat_fan( client.async_send_command.reset_mock() # Test setting unknown preset mode - with pytest.raises(ValueError): + with pytest.raises(NotValidPresetModeError) as exc: await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "Turbo"}, blocking=True, ) + assert exc.value.translation_key == "not_valid_preset_mode" client.async_send_command.reset_mock() diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index c57e3b1f868..bf015a70676 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -967,7 +967,7 @@ async def test_removed_device( # Check how many entities there are ent_reg = er.async_get(hass) entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id) - assert len(entity_entries) == 92 + assert len(entity_entries) == 93 # Remove a node and reload the entry old_node = driver.controller.nodes.pop(13) @@ -979,7 +979,7 @@ async def test_removed_device( device_entries = dr.async_entries_for_config_entry(dev_reg, integration.entry_id) assert len(device_entries) == 2 entity_entries = er.async_entries_for_config_entry(ent_reg, integration.entry_id) - assert len(entity_entries) == 61 + assert len(entity_entries) == 62 assert ( 
dev_reg.async_get_device(identifiers={get_device_id(driver, old_node)}) is None ) diff --git a/tests/components/zwave_js/test_lock.py b/tests/components/zwave_js/test_lock.py index 5a5711d9dad..2213e9cf069 100644 --- a/tests/components/zwave_js/test_lock.py +++ b/tests/components/zwave_js/test_lock.py @@ -15,10 +15,15 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_UNLOCK, ) -from homeassistant.components.zwave_js.const import DOMAIN as ZWAVE_JS_DOMAIN +from homeassistant.components.zwave_js.const import ( + ATTR_LOCK_TIMEOUT, + ATTR_OPERATION_TYPE, + DOMAIN as ZWAVE_JS_DOMAIN, +) from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher from homeassistant.components.zwave_js.lock import ( SERVICE_CLEAR_LOCK_USERCODE, + SERVICE_SET_LOCK_CONFIGURATION, SERVICE_SET_LOCK_USERCODE, ) from homeassistant.const import ( @@ -35,7 +40,11 @@ from .common import SCHLAGE_BE469_LOCK_ENTITY, replace_value_of_zwave_value async def test_door_lock( - hass: HomeAssistant, client, lock_schlage_be469, integration + hass: HomeAssistant, + client, + lock_schlage_be469, + integration, + caplog: pytest.LogCaptureFixture, ) -> None: """Test a lock entity with door lock command class.""" node = lock_schlage_be469 @@ -158,6 +167,96 @@ async def test_door_lock( client.async_send_command.reset_mock() + # Test set configuration + client.async_send_command.return_value = { + "response": {"status": 1, "remainingDuration": "default"} + } + caplog.clear() + await hass.services.async_call( + ZWAVE_JS_DOMAIN, + SERVICE_SET_LOCK_CONFIGURATION, + { + ATTR_ENTITY_ID: SCHLAGE_BE469_LOCK_ENTITY, + ATTR_OPERATION_TYPE: "timed", + ATTR_LOCK_TIMEOUT: 1, + }, + blocking=True, + ) + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "endpoint.invoke_cc_api" + assert args["nodeId"] == 20 + assert args["endpoint"] == 0 + assert args["args"] == [ + { + "insideHandlesCanOpenDoorConfiguration": 
[True, True, True, True], + "operationType": 2, + "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], + } + ] + assert args["commandClass"] == 98 + assert args["methodName"] == "setConfiguration" + assert "Result status" in caplog.text + assert "remaining duration" in caplog.text + assert "setting lock configuration" in caplog.text + + client.async_send_command.reset_mock() + client.async_send_command_no_wait.reset_mock() + caplog.clear() + + # Put node to sleep and validate that we don't wait for a return or log anything + event = Event( + "sleep", + { + "source": "node", + "event": "sleep", + "nodeId": node.node_id, + }, + ) + node.receive_event(event) + + await hass.services.async_call( + ZWAVE_JS_DOMAIN, + SERVICE_SET_LOCK_CONFIGURATION, + { + ATTR_ENTITY_ID: SCHLAGE_BE469_LOCK_ENTITY, + ATTR_OPERATION_TYPE: "timed", + ATTR_LOCK_TIMEOUT: 1, + }, + blocking=True, + ) + + assert len(client.async_send_command.call_args_list) == 0 + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args["command"] == "endpoint.invoke_cc_api" + assert args["nodeId"] == 20 + assert args["endpoint"] == 0 + assert args["args"] == [ + { + "insideHandlesCanOpenDoorConfiguration": [True, True, True, True], + "operationType": 2, + "outsideHandlesCanOpenDoorConfiguration": [True, True, True, True], + } + ] + assert args["commandClass"] == 98 + assert args["methodName"] == "setConfiguration" + assert "Result status" not in caplog.text + assert "remaining duration" not in caplog.text + assert "setting lock configuration" not in caplog.text + + # Mark node as alive + event = Event( + "alive", + { + "source": "node", + "event": "alive", + "nodeId": node.node_id, + }, + ) + node.receive_event(event) + client.async_send_command.side_effect = FailedZWaveCommand("test", 1, "test") # Test set usercode service error handling with pytest.raises(HomeAssistantError): diff --git 
a/tests/components/zwave_js/test_select.py b/tests/components/zwave_js/test_select.py index c63f0c429fd..1cbdb8799f3 100644 --- a/tests/components/zwave_js/test_select.py +++ b/tests/components/zwave_js/test_select.py @@ -320,3 +320,30 @@ async def test_config_parameter_select( state = hass.states.get(select_entity_id) assert state assert state.state == "Normal" + + +async def test_lock_popp_electric_strike_lock_control_select( + hass: HomeAssistant, client, lock_popp_electric_strike_lock_control, integration +) -> None: + """Test that the Popp Electric Strike Lock Control select entity.""" + LOCK_SELECT_ENTITY = "select.node_62_current_lock_mode" + state = hass.states.get(LOCK_SELECT_ENTITY) + assert state + assert state.state == "Unsecured" + await hass.services.async_call( + "select", + "select_option", + {"entity_id": LOCK_SELECT_ENTITY, "option": "UnsecuredWithTimeout"}, + blocking=True, + ) + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == lock_popp_electric_strike_lock_control.node_id + assert args["valueId"] == { + "endpoint": 0, + "commandClass": 98, + "property": "targetMode", + } + assert args["value"] == 1 diff --git a/tests/conftest.py b/tests/conftest.py index 09ad70bfcf1..fcd8e8b73a9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -383,7 +383,7 @@ def reset_hass_threading_local_object() -> Generator[None, None, None]: ha._hass.__dict__.clear() -@pytest.fixture(autouse=True) +@pytest.fixture(scope="session", autouse=True) def bcrypt_cost() -> Generator[None, None, None]: """Run with reduced rounds during tests, to speed up uses.""" import bcrypt @@ -1145,13 +1145,19 @@ def mock_zeroconf() -> Generator[None, None, None]: @pytest.fixture def mock_async_zeroconf(mock_zeroconf: None) -> Generator[None, None, None]: """Mock AsyncZeroconf.""" - from zeroconf import DNSCache # pylint: disable=import-outside-toplevel 
+ from zeroconf import DNSCache, Zeroconf # pylint: disable=import-outside-toplevel + from zeroconf.asyncio import ( # pylint: disable=import-outside-toplevel + AsyncZeroconf, + ) - with patch("homeassistant.components.zeroconf.HaAsyncZeroconf") as mock_aiozc: + with patch( + "homeassistant.components.zeroconf.HaAsyncZeroconf", spec=AsyncZeroconf + ) as mock_aiozc: zc = mock_aiozc.return_value zc.async_unregister_service = AsyncMock() zc.async_register_service = AsyncMock() zc.async_update_service = AsyncMock() + zc.zeroconf = Mock(spec=Zeroconf) zc.zeroconf.async_wait_for_start = AsyncMock() # DNSCache has strong Cython type checks, and MagicMock does not work # so we must mock the class directly @@ -1538,7 +1544,7 @@ async def mock_enable_bluetooth( await hass.async_block_till_done() -@pytest.fixture +@pytest.fixture(scope="session") def mock_bluetooth_adapters() -> Generator[None, None, None]: """Fixture to mock bluetooth adapters.""" with patch( diff --git a/tests/fixtures/core/config/component_validation/basic/configuration.yaml b/tests/fixtures/core/config/component_validation/basic/configuration.yaml index 5b3aacd9523..9c3d1eb190b 100644 --- a/tests/fixtures/core/config/component_validation/basic/configuration.yaml +++ b/tests/fixtures/core/config/component_validation/basic/configuration.yaml @@ -2,11 +2,22 @@ iot_domain: # This is correct and should not generate errors - platform: non_adr_0007 option1: abc - # This violates the non_adr_0007.iot_domain platform schema + # This violates the iot_domain platform schema (platform missing) + - paltfrom: non_adr_0007 + # This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) - platform: non_adr_0007 option1: 123 - # This violates the iot_domain platform schema - - paltfrom: non_adr_0007 + # This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) + - platform: non_adr_0007 + no_such_option: abc + option1: abc + # This violates the non_adr_0007.iot_domain 
platform schema: + # - no_such_option does not exist + # - option1 is missing + # - option2 is wrong type + - platform: non_adr_0007 + no_such_option: abc + option2: 123 # This is correct and should not generate errors adr_0007_1: @@ -19,3 +30,29 @@ adr_0007_2: adr_0007_3: host: blah.com port: foo + +# no_such_option does not exist +adr_0007_4: + host: blah.com + no_such_option: foo + +# Multiple errors: +# - host is missing +# - no_such_option does not exist +# - port is wrong type +adr_0007_5: + no_such_option: foo + port: foo + +# This is correct and should not generate errors +custom_validator_ok_1: + host: blah.com + +# Host is missing +custom_validator_ok_2: + +# This always raises HomeAssistantError +custom_validator_bad_1: + +# This always raises ValueError +custom_validator_bad_2: diff --git a/tests/fixtures/core/config/component_validation/basic_include/configuration.yaml b/tests/fixtures/core/config/component_validation/basic_include/configuration.yaml index ab86a6b34da..5744e3005fa 100644 --- a/tests/fixtures/core/config/component_validation/basic_include/configuration.yaml +++ b/tests/fixtures/core/config/component_validation/basic_include/configuration.yaml @@ -2,3 +2,9 @@ iot_domain: !include integrations/iot_domain.yaml adr_0007_1: !include integrations/adr_0007_1.yaml adr_0007_2: !include integrations/adr_0007_2.yaml adr_0007_3: !include integrations/adr_0007_3.yaml +adr_0007_4: !include integrations/adr_0007_4.yaml +adr_0007_5: !include integrations/adr_0007_5.yaml +custom_validator_ok_1: !include integrations/custom_validator_ok_1.yaml +custom_validator_ok_2: !include integrations/custom_validator_ok_2.yaml +custom_validator_bad_1: !include integrations/custom_validator_bad_1.yaml +custom_validator_bad_2: !include integrations/custom_validator_bad_2.yaml diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_4.yaml 
b/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_4.yaml new file mode 100644 index 00000000000..e8dcd8f4017 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_4.yaml @@ -0,0 +1,3 @@ +# no_such_option does not exist +host: blah.com +no_such_option: foo diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_5.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_5.yaml new file mode 100644 index 00000000000..0cda3d04a55 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/adr_0007_5.yaml @@ -0,0 +1,6 @@ +# Multiple errors: +# - host is missing +# - no_such_option does not exist +# - port is wrong type +no_such_option: foo +port: foo diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_1.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_1.yaml new file mode 100644 index 00000000000..12d6d869f35 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_1.yaml @@ -0,0 +1 @@ +# This always raises HomeAssistantError diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_2.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_2.yaml new file mode 100644 index 00000000000..7af4b20c016 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_bad_2.yaml @@ -0,0 +1 @@ +# This always raises ValueError diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_1.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_1.yaml new file mode 100644 index 
00000000000..d246d73c257 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_1.yaml @@ -0,0 +1,2 @@ +# This is correct and should not generate errors +host: blah.com diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_2.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_2.yaml new file mode 100644 index 00000000000..8b592b01e2d --- /dev/null +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/custom_validator_ok_2.yaml @@ -0,0 +1 @@ +# Host is missing diff --git a/tests/fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml b/tests/fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml index 405fc3aab91..dd592194f1a 100644 --- a/tests/fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml +++ b/tests/fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml @@ -1,8 +1,19 @@ # This is correct and should not generate errors - platform: non_adr_0007 option1: abc -# This violates the non_adr_0007.iot_domain platform schema +# This violates the iot_domain platform schema (platform missing) +- paltfrom: non_adr_0007 +# This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) - platform: non_adr_0007 option1: 123 -# This violates the iot_domain platform schema -- paltfrom: non_adr_0007 +# This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) +- platform: non_adr_0007 + no_such_option: abc + option1: abc +# This violates the non_adr_0007.iot_domain platform schema: +# - no_such_option does not exist +# - option1 is missing +# - option2 is wrong type +- platform: non_adr_0007 + no_such_option: abc + option2: 123 diff --git 
a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_2.yaml b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_2.yaml index f4d009c8cfa..f6c3219741e 100644 --- a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_2.yaml +++ b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_2.yaml @@ -1,3 +1,2 @@ -# This violates the non_adr_0007.iot_domain platform schema -platform: non_adr_0007 -option1: 123 +# This violates the iot_domain platform schema (platform missing) +paltfrom: non_adr_0007 diff --git a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_3.yaml b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_3.yaml index 94c18721061..2265e8c2f07 100644 --- a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_3.yaml +++ b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_3.yaml @@ -1,2 +1,3 @@ -# This violates the iot_domain platform schema -paltfrom: non_adr_0007 +# This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) +platform: non_adr_0007 +option1: 123 diff --git a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_4.yaml b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_4.yaml new file mode 100644 index 00000000000..53f220472e2 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_4.yaml @@ -0,0 +1,4 @@ +# This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) +platform: non_adr_0007 +no_such_option: abc +option1: abc diff --git a/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_5.yaml 
b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_5.yaml new file mode 100644 index 00000000000..b0fec6d5046 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_5.yaml @@ -0,0 +1,7 @@ +# This violates the non_adr_0007.iot_domain platform schema: +# - no_such_option does not exist +# - option1 is missing +# - option2 is wrong type +platform: non_adr_0007 +no_such_option: abc +option2: 123 diff --git a/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_1.yaml b/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_1.yaml index a0636cdecf4..172f96e2da2 100644 --- a/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_1.yaml +++ b/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_1.yaml @@ -1,3 +1,5 @@ # This is correct and should not generate errors - platform: non_adr_0007 option1: abc +# This violates the iot_domain platform schema (platform missing) +- paltfrom: non_adr_0007 diff --git a/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml b/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml index 16df25adcd7..f8ef2b5643b 100644 --- a/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml +++ b/tests/fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml @@ -1,5 +1,14 @@ -# This violates the non_adr_0007.iot_domain platform schema +# This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) - platform: non_adr_0007 option1: 123 - # This violates the iot_domain platform schema -- paltfrom: non_adr_0007 +# This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) +- platform: 
non_adr_0007 + no_such_option: abc + option1: abc +# This violates the non_adr_0007.iot_domain platform schema: +# - no_such_option does not exist +# - option1 is missing +# - option2 is wrong type +- platform: non_adr_0007 + no_such_option: abc + option2: 123 diff --git a/tests/fixtures/core/config/component_validation/packages/configuration.yaml b/tests/fixtures/core/config/component_validation/packages/configuration.yaml index 5b3cf74615a..b8116b5988e 100644 --- a/tests/fixtures/core/config/component_validation/packages/configuration.yaml +++ b/tests/fixtures/core/config/component_validation/packages/configuration.yaml @@ -1,28 +1,70 @@ homeassistant: packages: - pack_1: + pack_iot_domain_1: iot_domain: # This is correct and should not generate errors - platform: non_adr_0007 option1: abc - pack_2: + pack_iot_domain_2: iot_domain: - # This violates the non_adr_0007.iot_domain platform schema + # This violates the iot_domain platform schema (platform missing) + - paltfrom: non_adr_0007 + pack_iot_domain_3: + iot_domain: + # This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) - platform: non_adr_0007 option1: 123 - pack_3: + pack_iot_domain_4: iot_domain: - # This violates the iot_domain platform schema - - paltfrom: non_adr_0007 - pack_4: + # This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) + - platform: non_adr_0007 + no_such_option: abc + option1: abc + pack_iot_domain_5: + iot_domain: + # This violates the non_adr_0007.iot_domain platform schema: + # - no_such_option does not exist + # - option1 is missing + # - option2 is wrong type + - platform: non_adr_0007 + no_such_option: abc + option2: 123 + pack_adr_0007_1: # This is correct and should not generate errors adr_0007_1: host: blah.com - pack_5: + pack_adr_0007_2: # Host is missing adr_0007_2: - pack_6: + pack_adr_0007_3: # Port is wrong type adr_0007_3: host: blah.com port: foo + pack_adr_0007_4: + # no_such_option does not exist + 
adr_0007_4: + host: blah.com + no_such_option: foo + pack_adr_0007_5: + # Multiple errors: + # - host is missing + # - no_such_option does not exist + # - port is wrong type + adr_0007_5: + no_such_option: foo + port: foo + + pack_custom_validator_ok_1: + # This is correct and should not generate errors + custom_validator_ok_1: + host: blah.com + pack_custom_validator_ok_2: + # Host is missing + custom_validator_ok_2: + pack_custom_validator_bad_1: + # This always raises HomeAssistantError + custom_validator_bad_1: + pack_custom_validator_bad_2: + # This always raises ValueError + custom_validator_bad_2: diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_4.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_4.yaml new file mode 100644 index 00000000000..b5d4602c683 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_4.yaml @@ -0,0 +1,4 @@ +# no_such_option does not exist +adr_0007_4: + host: blah.com + no_such_option: foo diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_5.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_5.yaml new file mode 100644 index 00000000000..fad2c53d527 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/adr_0007_5.yaml @@ -0,0 +1,7 @@ +# Multiple errors: +# - host is missing +# - no_such_option does not exist +# - port is wrong type +adr_0007_5: + no_such_option: foo + port: foo diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_1.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_1.yaml new file mode 100644 index 00000000000..2e17b766800 --- /dev/null +++ 
b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_1.yaml @@ -0,0 +1,2 @@ +# This always raises HomeAssistantError +custom_validator_bad_1: diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_2.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_2.yaml new file mode 100644 index 00000000000..213c3ea03f8 --- /dev/null +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_bad_2.yaml @@ -0,0 +1,2 @@ +# This always raises ValueError +custom_validator_bad_2: diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_1.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_1.yaml new file mode 100644 index 00000000000..257ff66d10b --- /dev/null +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_1.yaml @@ -0,0 +1,3 @@ +# This is correct and should not generate errors +custom_validator_ok_1: + host: blah.com diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_2.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_2.yaml new file mode 100644 index 00000000000..59a240defaf --- /dev/null +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/custom_validator_ok_2.yaml @@ -0,0 +1,2 @@ +# Host is missing +custom_validator_ok_2: diff --git a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml index 8c366297165..e137411b0fc 100644 
--- a/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml +++ b/tests/fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml @@ -2,8 +2,19 @@ iot_domain: # This is correct and should not generate errors - platform: non_adr_0007 option1: abc - # This violates the non_adr_0007.iot_domain platform schema + # This violates the iot_domain platform schema (platform missing) + - paltfrom: non_adr_0007 + # This violates the non_adr_0007.iot_domain platform schema (option1 wrong type) - platform: non_adr_0007 option1: 123 - # This violates the iot_domain platform schema - - paltfrom: non_adr_0007 + # This violates the non_adr_0007.iot_domain platform schema (no_such_option does not exist) + - platform: non_adr_0007 + no_such_option: abc + option1: abc + # This violates the non_adr_0007.iot_domain platform schema: + # - no_such_option does not exist + # - option1 is missing + # - option2 is wrong type + - platform: non_adr_0007 + no_such_option: abc + option2: 123 diff --git a/tests/fixtures/core/config/package_errors/packages/configuration.yaml b/tests/fixtures/core/config/package_errors/packages/configuration.yaml index 498eca0edac..19ec6e1e983 100644 --- a/tests/fixtures/core/config/package_errors/packages/configuration.yaml +++ b/tests/fixtures/core/config/package_errors/packages/configuration.yaml @@ -19,3 +19,6 @@ homeassistant: pack_4: adr_0007_3: host: blah.com + pack_5: + unknown_integration: + host: blah.com diff --git a/tests/fixtures/core/config/package_errors/packages_include_dir_named/integrations/unknown_integration.yaml b/tests/fixtures/core/config/package_errors/packages_include_dir_named/integrations/unknown_integration.yaml new file mode 100644 index 00000000000..d041b77ea29 --- /dev/null +++ b/tests/fixtures/core/config/package_errors/packages_include_dir_named/integrations/unknown_integration.yaml @@ -0,0 +1,3 @@ +# Unknown integration +unknown_integration: + 
host: blah.com diff --git a/tests/fixtures/core/config/package_exceptions/packages/configuration.yaml b/tests/fixtures/core/config/package_exceptions/packages/configuration.yaml new file mode 100644 index 00000000000..bf2a79c1307 --- /dev/null +++ b/tests/fixtures/core/config/package_exceptions/packages/configuration.yaml @@ -0,0 +1,4 @@ +homeassistant: + packages: + pack_1: + test_domain: diff --git a/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/configuration.yaml b/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/configuration.yaml new file mode 100644 index 00000000000..d3b52e4d49d --- /dev/null +++ b/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/configuration.yaml @@ -0,0 +1,3 @@ +homeassistant: + # Load packages + packages: !include_dir_named integrations diff --git a/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/integrations/unknown_integration.yaml b/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/integrations/unknown_integration.yaml new file mode 100644 index 00000000000..66a70375f70 --- /dev/null +++ b/tests/fixtures/core/config/package_exceptions/packages_include_dir_named/integrations/unknown_integration.yaml @@ -0,0 +1 @@ +test_domain: diff --git a/tests/helpers/test_aiohttp_compat.py b/tests/helpers/test_aiohttp_compat.py deleted file mode 100644 index 749984dbc2e..00000000000 --- a/tests/helpers/test_aiohttp_compat.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Test the aiohttp compatibility shim.""" - -import asyncio -from contextlib import suppress - -from aiohttp import client, web, web_protocol, web_server -import pytest - -from homeassistant.helpers.aiohttp_compat import CancelOnDisconnectRequestHandler - - -@pytest.mark.allow_hosts(["127.0.0.1"]) -async def test_handler_cancellation(socket_enabled, unused_tcp_port_factory) -> None: - """Test that handler cancels the request on disconnect. 
- - From aiohttp tests/test_web_server.py - """ - assert web_protocol.RequestHandler is CancelOnDisconnectRequestHandler - assert web_server.RequestHandler is CancelOnDisconnectRequestHandler - - event = asyncio.Event() - port = unused_tcp_port_factory() - - async def on_request(_: web.Request) -> web.Response: - nonlocal event - try: - await asyncio.sleep(10) - except asyncio.CancelledError: - event.set() - raise - else: - raise web.HTTPInternalServerError() - - app = web.Application() - app.router.add_route("GET", "/", on_request) - - runner = web.AppRunner(app) - await runner.setup() - - site = web.TCPSite(runner, host="127.0.0.1", port=port) - - await site.start() - - try: - async with client.ClientSession( - timeout=client.ClientTimeout(total=0.1) - ) as sess: - with pytest.raises(asyncio.TimeoutError): - await sess.get(f"http://127.0.0.1:{port}/") - - with suppress(asyncio.TimeoutError): - await asyncio.wait_for(event.wait(), timeout=1) - assert event.is_set(), "Request handler hasn't been cancelled" - finally: - await asyncio.gather(runner.shutdown(), site.stop()) diff --git a/tests/helpers/test_check_config.py b/tests/helpers/test_check_config.py index a62bd8b39e4..b65f09aeaf9 100644 --- a/tests/helpers/test_check_config.py +++ b/tests/helpers/test_check_config.py @@ -7,6 +7,7 @@ import voluptuous as vol from homeassistant.config import YAML_CONFIG_FILE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.check_config import ( CheckConfigError, HomeAssistantConfig, @@ -81,9 +82,8 @@ async def test_bad_core_config(hass: HomeAssistant) -> None: error = CheckConfigError( ( - "Invalid config for [homeassistant]: not a valid value for dictionary " - "value @ data['unit_system']. Got 'bad'. (See " - f"{hass.config.path(YAML_CONFIG_FILE)}, line 2)." 
+ f"Invalid config for 'homeassistant' at {YAML_CONFIG_FILE}, line 2:" + " not a valid value for dictionary value 'unit_system', got 'bad'" ), "homeassistant", {"unit_system": "bad"}, @@ -103,8 +103,8 @@ async def test_config_platform_valid(hass: HomeAssistant) -> None: _assert_warnings_errors(res, [], []) -async def test_component_platform_not_found(hass: HomeAssistant) -> None: - """Test errors if component or platform not found.""" +async def test_integration_not_found(hass: HomeAssistant) -> None: + """Test errors if integration not found.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "beer:"} with patch("os.path.isfile", return_value=True), patch_yaml_files(files): @@ -118,8 +118,8 @@ async def test_component_platform_not_found(hass: HomeAssistant) -> None: _assert_warnings_errors(res, [warning], []) -async def test_component_requirement_not_found(hass: HomeAssistant) -> None: - """Test errors if component with a requirement not found not found.""" +async def test_integrationt_requirement_not_found(hass: HomeAssistant) -> None: + """Test errors if integration with a requirement not found not found.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "test_custom_component:"} with patch( @@ -141,8 +141,8 @@ async def test_component_requirement_not_found(hass: HomeAssistant) -> None: _assert_warnings_errors(res, [warning], []) -async def test_component_not_found_recovery_mode(hass: HomeAssistant) -> None: - """Test no errors if component not found in recovery mode.""" +async def test_integration_not_found_recovery_mode(hass: HomeAssistant) -> None: + """Test no errors if integration not found in recovery mode.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "beer:"} hass.config.recovery_mode = True @@ -154,8 +154,8 @@ async def test_component_not_found_recovery_mode(hass: HomeAssistant) -> None: _assert_warnings_errors(res, [], []) -async def test_component_not_found_safe_mode(hass: 
HomeAssistant) -> None: - """Test no errors if component not found in safe mode.""" +async def test_integration_not_found_safe_mode(hass: HomeAssistant) -> None: + """Test no errors if integration not found in safe mode.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "beer:"} hass.config.safe_mode = True @@ -167,8 +167,8 @@ async def test_component_not_found_safe_mode(hass: HomeAssistant) -> None: _assert_warnings_errors(res, [], []) -async def test_component_import_error(hass: HomeAssistant) -> None: - """Test errors if component with a requirement not found not found.""" +async def test_integration_import_error(hass: HomeAssistant) -> None: + """Test errors if integration with a requirement not found not found.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:"} with patch( @@ -188,19 +188,19 @@ async def test_component_import_error(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - ("component", "errors", "warnings", "message"), + ("integration", "errors", "warnings", "message"), [ - ("frontend", 1, 0, "[blah] is an invalid option for [frontend]"), - ("http", 1, 0, "[blah] is an invalid option for [http]"), - ("logger", 0, 1, "[blah] is an invalid option for [logger]"), + ("frontend", 1, 0, "'blah' is an invalid option for 'frontend'"), + ("http", 1, 0, "'blah' is an invalid option for 'http'"), + ("logger", 0, 1, "'blah' is an invalid option for 'logger'"), ], ) -async def test_component_schema_error( - hass: HomeAssistant, component: str, errors: int, warnings: int, message: str +async def test_integration_schema_error( + hass: HomeAssistant, integration: str, errors: int, warnings: int, message: str ) -> None: - """Test schema error in component.""" + """Test schema error in integration.""" # Make sure they don't exist - files = {YAML_CONFIG_FILE: BASE_CONFIG + f"frontend:\n{component}:\n blah:"} + files = {YAML_CONFIG_FILE: BASE_CONFIG + f"frontend:\n{integration}:\n blah:"} 
hass.config.safe_mode = True with patch("os.path.isfile", return_value=True), patch_yaml_files(files): res = await async_check_ha_config_file(hass) @@ -215,8 +215,8 @@ async def test_component_schema_error( assert message in warn.message -async def test_component_platform_not_found_2(hass: HomeAssistant) -> None: - """Test errors if component or platform not found.""" +async def test_platform_not_found(hass: HomeAssistant) -> None: + """Test errors if platform not found.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: beer"} with patch("os.path.isfile", return_value=True), patch_yaml_files(files): @@ -274,33 +274,33 @@ async def test_platform_not_found_safe_mode(hass: HomeAssistant) -> None: ( "blah:\n - platform: test\n option1: 123", 1, - "Invalid config for [blah.test]: expected str for dictionary value", + "expected str for dictionary value", {"option1": 123, "platform": "test"}, ), # Test the attached config is unvalidated (key old is removed by validator) ( "blah:\n - platform: test\n old: blah\n option1: 123", 1, - "Invalid config for [blah.test]: expected str for dictionary value", + "expected str for dictionary value", {"old": "blah", "option1": 123, "platform": "test"}, ), # Test base platform configuration error ( "blah:\n - paltfrom: test\n", 1, - "Invalid config for [blah]: required key not provided", + "required key 'platform' not provided", {"paltfrom": "test"}, ), ], ) -async def test_component_platform_schema_error( +async def test_platform_schema_error( hass: HomeAssistant, extra_config: str, warnings: int, message: str | None, config: dict | None, ) -> None: - """Test schema error in component.""" + """Test schema error in platform.""" comp_platform_schema = cv.PLATFORM_SCHEMA.extend({vol.Remove("old"): str}) comp_platform_schema_base = comp_platform_schema.extend({}, extra=vol.ALLOW_EXTRA) mock_integration( @@ -328,7 +328,7 @@ async def test_component_platform_schema_error( assert warn.config == 
config -async def test_component_config_platform_import_error(hass: HomeAssistant) -> None: +async def test_config_platform_import_error(hass: HomeAssistant) -> None: """Test errors if config platform fails to import.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: beer"} @@ -348,8 +348,8 @@ async def test_component_config_platform_import_error(hass: HomeAssistant) -> No _assert_warnings_errors(res, [], [error]) -async def test_component_platform_import_error(hass: HomeAssistant) -> None: - """Test errors if component or platform not found.""" +async def test_platform_import_error(hass: HomeAssistant) -> None: + """Test errors if platform not found.""" # Make sure they don't exist files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: demo"} with patch( @@ -379,8 +379,8 @@ async def test_package_invalid(hass: HomeAssistant) -> None: warning = CheckConfigError( ( - "Package p1 setup failed. Component group cannot be merged. Expected a " - "dict." 
+ "Setup of package 'p1' failed: integration 'group' cannot be merged" + ", expected a dict" ), "homeassistant.packages.p1.group", {"group": ["a"]}, @@ -416,9 +416,7 @@ automation: service_to_call: test.automation input_datetime: """, - hass.config.path( - "blueprints/automation/test_event_service.yaml" - ): """ + hass.config.path("blueprints/automation/test_event_service.yaml"): """ blueprint: name: "Call service based on event" domain: automation @@ -440,12 +438,35 @@ action: assert "input_datetime" in res -async def test_config_platform_raise(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("exception", "errors", "warnings", "message"), + [ + ( + Exception("Broken"), + 1, + 0, + "Unexpected error calling config validator: Broken", + ), + ( + HomeAssistantError("Broken"), + 0, + 1, + "Invalid config for 'bla' at configuration.yaml, line 11: Broken", + ), + ], +) +async def test_config_platform_raise( + hass: HomeAssistant, + exception: Exception, + errors: int, + warnings: int, + message: str, +) -> None: """Test bad config validation platform.""" mock_platform( hass, "bla.config", - Mock(async_validate_config=Mock(side_effect=Exception("Broken"))), + Mock(async_validate_config=Mock(side_effect=exception)), ) files = { YAML_CONFIG_FILE: BASE_CONFIG @@ -457,11 +478,11 @@ bla: with patch("os.path.isfile", return_value=True), patch_yaml_files(files): res = await async_check_ha_config_file(hass) error = CheckConfigError( - "Unexpected error calling config validator: Broken", + message, "bla", {"value": 1}, ) - _assert_warnings_errors(res, [], [error]) + _assert_warnings_errors(res, [error] * warnings, [error] * errors) async def test_removed_yaml_support(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 90d8030be79..71c81b096ca 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -9,12 +9,7 @@ from homeassistant.config import 
async_process_ha_core_config from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_flow -from tests.common import ( - MockConfigEntry, - MockModule, - mock_entity_platform, - mock_integration, -) +from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform @pytest.fixture @@ -77,7 +72,7 @@ async def test_user_has_confirmation( ) -> None: """Test user requires confirmation to setup.""" discovery_flow_conf["discovered"] = True - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) result = await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_USER}, data={} @@ -184,7 +179,7 @@ async def test_multiple_discoveries( hass: HomeAssistant, discovery_flow_conf: dict[str, bool] ) -> None: """Test we only create one instance for multiple discoveries.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) result = await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -202,7 +197,7 @@ async def test_only_one_in_progress( hass: HomeAssistant, discovery_flow_conf: dict[str, bool] ) -> None: """Test a user initialized one will finish and cancel discovered one.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) # Discovery starts flow result = await hass.config_entries.flow.async_init( @@ -230,7 +225,7 @@ async def test_import_abort_discovery( hass: HomeAssistant, discovery_flow_conf: dict[str, bool] ) -> None: """Test import will finish and cancel discovered one.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) # Discovery starts flow result = await hass.config_entries.flow.async_init( @@ -280,7 +275,7 @@ async def test_ignored_discoveries( hass: HomeAssistant, discovery_flow_conf: dict[str, bool] ) -> None: 
"""Test we can ignore discovered entries.""" - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) result = await hass.config_entries.flow.async_init( "test", context={"source": config_entries.SOURCE_DISCOVERY}, data={} @@ -373,7 +368,7 @@ async def test_webhook_create_cloudhook( async_remove_entry=config_entry_flow.webhook_async_remove_entry, ), ) - mock_entity_platform(hass, "config_flow.test_single", None) + mock_platform(hass, "test_single.config_flow", None) result = await hass.config_entries.flow.async_init( "test_single", context={"source": config_entries.SOURCE_USER} @@ -428,7 +423,7 @@ async def test_webhook_create_cloudhook_aborts_not_connected( async_remove_entry=config_entry_flow.webhook_async_remove_entry, ), ) - mock_entity_platform(hass, "config_flow.test_single", None) + mock_platform(hass, "test_single.config_flow", None) result = await hass.config_entries.flow.async_init( "test_single", context={"source": config_entries.SOURCE_USER} diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index a9ddd89a0b3..6d1945f2d5f 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -832,6 +832,7 @@ def test_selector_in_serializer() -> None: "selector": { "text": { "multiline": False, + "multiple": False, } } } diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index 2900cb2c09e..d73bfe84607 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -9,12 +9,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery from homeassistant.helpers.dispatcher import async_dispatcher_send -from tests.common import ( - MockModule, - MockPlatform, - mock_entity_platform, - mock_integration, -) +from tests.common import MockModule, MockPlatform, mock_integration, mock_platform @pytest.fixture @@ -136,7 +131,7 @@ async def 
test_circular_import(hass: HomeAssistant) -> None: # dependencies are only set in component level # since we are using manifest to hold them mock_integration(hass, MockModule("test_circular", dependencies=["test_component"])) - mock_entity_platform(hass, "switch.test_circular", MockPlatform(setup_platform)) + mock_platform(hass, "test_circular.switch", MockPlatform(setup_platform)) await setup.async_setup_component( hass, diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index b5cda6770c5..40e25633992 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -35,8 +35,8 @@ from tests.common import ( MockModule, MockPlatform, async_fire_time_changed, - mock_entity_platform, mock_integration, + mock_platform, ) _LOGGER = logging.getLogger(__name__) @@ -51,7 +51,7 @@ async def test_setup_loads_platforms(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("test_component", setup=component_setup)) # mock the dependencies mock_integration(hass, MockModule("mod2", dependencies=["test_component"])) - mock_entity_platform(hass, "test_domain.mod2", MockPlatform(platform_setup)) + mock_platform(hass, "mod2.test_domain", MockPlatform(platform_setup)) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -70,8 +70,8 @@ async def test_setup_recovers_when_setup_raises(hass: HomeAssistant) -> None: platform1_setup = Mock(side_effect=Exception("Broken")) platform2_setup = Mock(return_value=None) - mock_entity_platform(hass, "test_domain.mod1", MockPlatform(platform1_setup)) - mock_entity_platform(hass, "test_domain.mod2", MockPlatform(platform2_setup)) + mock_platform(hass, "mod1.test_domain", MockPlatform(platform1_setup)) + mock_platform(hass, "mod2.test_domain", MockPlatform(platform2_setup)) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -130,7 +130,7 @@ async def test_set_scan_interval_via_config( """Test the platform setup.""" add_entities([MockEntity(should_poll=True)]) - 
mock_entity_platform(hass, "test_domain.platform", MockPlatform(platform_setup)) + mock_platform(hass, "platform.test_domain", MockPlatform(platform_setup)) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -157,7 +157,7 @@ async def test_set_entity_namespace_via_config(hass: HomeAssistant) -> None: platform = MockPlatform(platform_setup) - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -205,7 +205,7 @@ async def test_platform_not_ready(hass: HomeAssistant) -> None: """Test that we retry when platform not ready.""" platform1_setup = Mock(side_effect=[PlatformNotReady, PlatformNotReady, None]) mock_integration(hass, MockModule("mod1")) - mock_entity_platform(hass, "test_domain.mod1", MockPlatform(platform1_setup)) + mock_platform(hass, "mod1.test_domain", MockPlatform(platform1_setup)) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -309,7 +309,7 @@ async def test_setup_dependencies_platform(hass: HomeAssistant) -> None: hass, MockModule("test_component", dependencies=["test_component2"]) ) mock_integration(hass, MockModule("test_component2")) - mock_entity_platform(hass, "test_domain.test_component", MockPlatform()) + mock_platform(hass, "test_component.test_domain", MockPlatform()) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -323,9 +323,9 @@ async def test_setup_dependencies_platform(hass: HomeAssistant) -> None: async def test_setup_entry(hass: HomeAssistant) -> None: """Test setup entry calls async_setup_entry on platform.""" mock_setup_entry = AsyncMock(return_value=True) - mock_entity_platform( + mock_platform( hass, - "test_domain.entry_domain", + "entry_domain.test_domain", MockPlatform( async_setup_entry=mock_setup_entry, scan_interval=timedelta(seconds=5) ), @@ -354,9 +354,9 @@ async def test_setup_entry_platform_not_exist(hass: HomeAssistant) -> None: async def test_setup_entry_fails_duplicate(hass: HomeAssistant) 
-> None: """Test we don't allow setting up a config entry twice.""" mock_setup_entry = AsyncMock(return_value=True) - mock_entity_platform( + mock_platform( hass, - "test_domain.entry_domain", + "entry_domain.test_domain", MockPlatform(async_setup_entry=mock_setup_entry), ) @@ -372,9 +372,9 @@ async def test_setup_entry_fails_duplicate(hass: HomeAssistant) -> None: async def test_unload_entry_resets_platform(hass: HomeAssistant) -> None: """Test unloading an entry removes all entities.""" mock_setup_entry = AsyncMock(return_value=True) - mock_entity_platform( + mock_platform( hass, - "test_domain.entry_domain", + "entry_domain.test_domain", MockPlatform(async_setup_entry=mock_setup_entry), ) @@ -673,7 +673,7 @@ async def test_platforms_shutdown_on_stop(hass: HomeAssistant) -> None: """Test that we shutdown platforms on stop.""" platform1_setup = Mock(side_effect=[PlatformNotReady, PlatformNotReady, None]) mock_integration(hass, MockModule("mod1")) - mock_entity_platform(hass, "test_domain.mod1", MockPlatform(platform1_setup)) + mock_platform(hass, "mod1.test_domain", MockPlatform(platform1_setup)) component = EntityComponent(_LOGGER, DOMAIN, hass) diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 57020268323..721114c1a7b 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -43,7 +43,7 @@ from tests.common import ( MockEntityPlatform, MockPlatform, async_fire_time_changed, - mock_entity_platform, + mock_platform, mock_registry, ) @@ -195,7 +195,7 @@ async def test_set_scan_interval_via_platform( platform = MockPlatform(platform_setup) platform.SCAN_INTERVAL = timedelta(seconds=30) - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -230,7 +230,7 @@ async def test_platform_warn_slow_setup(hass: HomeAssistant) -> None: """Warn we log when platform setup takes 
a long time.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -264,7 +264,7 @@ async def test_platform_error_slow_setup( platform = MockPlatform(async_setup_platform=setup_platform) component = EntityComponent(_LOGGER, DOMAIN, hass) - mock_entity_platform(hass, "test_domain.test_platform", platform) + mock_platform(hass, "test_platform.test_domain", platform) await component.async_setup({DOMAIN: {"platform": "test_platform"}}) await hass.async_block_till_done() assert len(called) == 1 @@ -298,7 +298,7 @@ async def test_parallel_updates_async_platform(hass: HomeAssistant) -> None: """Test async platform does not have parallel_updates limit by default.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -328,7 +328,7 @@ async def test_parallel_updates_async_platform_with_constant( platform = MockPlatform() platform.PARALLEL_UPDATES = 2 - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -355,7 +355,7 @@ async def test_parallel_updates_sync_platform(hass: HomeAssistant) -> None: """Test sync platform parallel_updates default set to 1.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -381,7 +381,7 @@ async def test_parallel_updates_no_update_method(hass: HomeAssistant) -> None: """Test platform parallel_updates default set to 0.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.platform", platform) + 
mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -403,7 +403,7 @@ async def test_parallel_updates_sync_platform_with_constant( platform = MockPlatform() platform.PARALLEL_UPDATES = 2 - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -431,7 +431,7 @@ async def test_parallel_updates_async_platform_updates_in_parallel( """Test an async platform is updated in parallel.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.async_platform", platform) + mock_platform(hass, "async_platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -479,7 +479,7 @@ async def test_parallel_updates_sync_platform_updates_in_sequence( """Test a sync platform is updated in sequence.""" platform = MockPlatform() - mock_entity_platform(hass, "test_domain.platform", platform) + mock_platform(hass, "platform.test_domain", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) component._platforms = {} @@ -1660,16 +1660,16 @@ async def test_setup_entry_with_entities_that_block_forever( platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") - mock_entity_platform = MockEntityPlatform( + platform = MockEntityPlatform( hass, platform_name=config_entry.domain, platform=platform ) with patch.object(entity_platform, "SLOW_ADD_ENTITY_MAX_WAIT", 0.01), patch.object( entity_platform, "SLOW_ADD_MIN_TIMEOUT", 0.01 ): - assert await mock_entity_platform.async_setup_entry(config_entry) + assert await platform.async_setup_entry(config_entry) await hass.async_block_till_done() - full_name = f"{mock_entity_platform.domain}.{config_entry.domain}" + full_name = f"{platform.domain}.{config_entry.domain}" assert full_name in 
hass.config.components assert len(hass.states.async_entity_ids()) == 0 assert len(entity_registry.entities) == 1 diff --git a/tests/helpers/test_reload.py b/tests/helpers/test_reload.py index ad3b7ccb243..586dbc19eb8 100644 --- a/tests/helpers/test_reload.py +++ b/tests/helpers/test_reload.py @@ -3,10 +3,12 @@ import logging from unittest.mock import AsyncMock, Mock, patch import pytest +import voluptuous as vol from homeassistant import config from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigValidationError, HomeAssistantError from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import async_get_platforms from homeassistant.helpers.reload import ( @@ -21,8 +23,8 @@ from tests.common import ( MockModule, MockPlatform, get_fixture_path, - mock_entity_platform, mock_integration, + mock_platform, ) _LOGGER = logging.getLogger(__name__) @@ -42,8 +44,8 @@ async def test_reload_platform(hass: HomeAssistant) -> None: mock_integration(hass, MockModule(DOMAIN, setup=component_setup)) mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN])) - mock_platform = MockPlatform(async_setup_platform=setup_platform) - mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform) + platform = MockPlatform(async_setup_platform=setup_platform) + mock_platform(hass, f"{PLATFORM}.{DOMAIN}", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -82,8 +84,8 @@ async def test_setup_reload_service(hass: HomeAssistant) -> None: mock_integration(hass, MockModule(DOMAIN, setup=component_setup)) mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN])) - mock_platform = MockPlatform(async_setup_platform=setup_platform) - mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform) + platform = MockPlatform(async_setup_platform=setup_platform) + mock_platform(hass, f"{PLATFORM}.{DOMAIN}", platform) component = 
EntityComponent(_LOGGER, DOMAIN, hass) @@ -123,8 +125,8 @@ async def test_setup_reload_service_when_async_process_component_config_fails( mock_integration(hass, MockModule(DOMAIN, setup=component_setup)) mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN])) - mock_platform = MockPlatform(async_setup_platform=setup_platform) - mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform) + platform = MockPlatform(async_setup_platform=setup_platform) + mock_platform(hass, f"{PLATFORM}.{DOMAIN}", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -139,7 +141,9 @@ async def test_setup_reload_service_when_async_process_component_config_fails( yaml_path = get_fixture_path("helpers/reload_configuration.yaml") with patch.object(config, "YAML_CONFIG_FILE", yaml_path), patch.object( - config, "async_process_component_config", return_value=None + config, + "async_process_component_config", + return_value=config.IntegrationConfigInfo(None, []), ): await hass.services.async_call( PLATFORM, @@ -173,8 +177,8 @@ async def test_setup_reload_service_with_platform_that_provides_async_reset_plat mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN])) - mock_platform = MockPlatform(async_setup_platform=setup_platform) - mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform) + platform = MockPlatform(async_setup_platform=setup_platform) + mock_platform(hass, f"{PLATFORM}.{DOMAIN}", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) @@ -208,8 +212,49 @@ async def test_async_integration_yaml_config(hass: HomeAssistant) -> None: yaml_path = get_fixture_path(f"helpers/{DOMAIN}_configuration.yaml") with patch.object(config, "YAML_CONFIG_FILE", yaml_path): processed_config = await async_integration_yaml_config(hass, DOMAIN) + assert processed_config == {DOMAIN: [{"name": "one"}, {"name": "two"}]} + # Test fetching yaml config does not raise when the raise_on_failure option is set + processed_config = await 
async_integration_yaml_config( + hass, DOMAIN, raise_on_failure=True + ) + assert processed_config == {DOMAIN: [{"name": "one"}, {"name": "two"}]} - assert processed_config == {DOMAIN: [{"name": "one"}, {"name": "two"}]} + +async def test_async_integration_failing_yaml_config(hass: HomeAssistant) -> None: + """Test reloading yaml config for an integration fails. + + In case an integration reloads its yaml configuration it should throw when + the new config failed to load and raise_on_failure is set to True. + """ + schema_without_name_attr = vol.Schema({vol.Required("some_option"): str}) + + mock_integration(hass, MockModule(DOMAIN, config_schema=schema_without_name_attr)) + + yaml_path = get_fixture_path(f"helpers/{DOMAIN}_configuration.yaml") + with patch.object(config, "YAML_CONFIG_FILE", yaml_path): + # Test fetching yaml config does not raise without raise_on_failure option + processed_config = await async_integration_yaml_config(hass, DOMAIN) + assert processed_config is None + # Test fetching yaml config does not raise when the raise_on_failure option is set + with pytest.raises(ConfigValidationError): + await async_integration_yaml_config(hass, DOMAIN, raise_on_failure=True) + + +async def test_async_integration_failing_on_reload(hass: HomeAssistant) -> None: + """Test reloading yaml config for an integration fails with an other exception. + + In case an integration reloads its yaml configuration it should throw when + the new config failed to load and raise_on_failure is set to True. 
+ """ + mock_integration(hass, MockModule(DOMAIN)) + + yaml_path = get_fixture_path(f"helpers/{DOMAIN}_configuration.yaml") + with patch.object(config, "YAML_CONFIG_FILE", yaml_path), patch( + "homeassistant.config.async_process_component_config", + side_effect=HomeAssistantError(), + ), pytest.raises(HomeAssistantError): + # Test fetching yaml config does raise when the raise_on_failure option is set + await async_integration_yaml_config(hass, DOMAIN, raise_on_failure=True) async def test_async_integration_missing_yaml_config(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_restore_state.py b/tests/helpers/test_restore_state.py index fa0a14b8fbb..f01718d6af6 100644 --- a/tests/helpers/test_restore_state.py +++ b/tests/helpers/test_restore_state.py @@ -31,8 +31,8 @@ from tests.common import ( MockModule, MockPlatform, async_fire_time_changed, - mock_entity_platform, mock_integration, + mock_platform, ) _LOGGER = logging.getLogger(__name__) @@ -499,8 +499,8 @@ async def test_restore_entity_end_to_end( mock_integration(hass, MockModule(DOMAIN, setup=component_setup)) mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN])) - mock_platform = MockPlatform(async_setup_platform=async_setup_platform) - mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform) + platform = MockPlatform(async_setup_platform=async_setup_platform) + mock_platform(hass, f"{PLATFORM}.{DOMAIN}", platform) component = EntityComponent(_LOGGER, DOMAIN, hass) diff --git a/tests/helpers/test_schema_config_entry_flow.py b/tests/helpers/test_schema_config_entry_flow.py index b069f0cb8f5..58f6a261aef 100644 --- a/tests/helpers/test_schema_config_entry_flow.py +++ b/tests/helpers/test_schema_config_entry_flow.py @@ -23,13 +23,7 @@ from homeassistant.helpers.schema_config_entry_flow import ( ) from homeassistant.util.decorator import Registry -from tests.common import ( - MockConfigEntry, - MockModule, - mock_entity_platform, - mock_integration, - mock_platform, -) +from 
tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform TEST_DOMAIN = "test" @@ -232,7 +226,7 @@ async def test_options_flow_advanced_option( options_flow = OPTIONS_FLOW mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) config_entry = MockConfigEntry( data={}, domain="test", @@ -521,7 +515,7 @@ async def test_suggested_values( options_flow = OPTIONS_FLOW mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) config_entry = MockConfigEntry( data={}, domain="test", @@ -634,7 +628,7 @@ async def test_options_flow_state(hass: HomeAssistant) -> None: options_flow = OPTIONS_FLOW mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) config_entry = MockConfigEntry( data={}, domain="test", @@ -700,7 +694,7 @@ async def test_options_flow_omit_optional_keys( options_flow = OPTIONS_FLOW mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) config_entry = MockConfigEntry( data={}, domain="test", diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index 1e449fd103a..c4ad244620b 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -602,6 +602,11 @@ def test_object_selector_schema(schema, valid_selections, invalid_selections) -> ({"multiline": True}, (), ()), ({"multiline": False, "type": "email"}, (), ()), ({"prefix": "before", "suffix": "after"}, (), ()), + ( + {"multiple": True}, + (["abc123", "def456"],), + ("abc123", None, ["abc123", None]), + ), ), ) def test_text_selector_schema(schema, valid_selections, invalid_selections) -> None: @@ -907,6 +912,16 @@ def test_rgb_color_selector_schema( (100, 200), (99, 201), ), + ( + {"unit": 
"mired", "min": 100, "max": 200}, + (100, 200), + (99, 201), + ), + ( + {"unit": "kelvin", "min": 1000, "max": 2000}, + (1000, 2000), + (999, 2001), + ), ), ) def test_color_tempselector_schema( diff --git a/tests/helpers/test_system_info.py b/tests/helpers/test_system_info.py index ebb0cc35c20..5c3697ad936 100644 --- a/tests/helpers/test_system_info.py +++ b/tests/helpers/test_system_info.py @@ -38,13 +38,9 @@ async def test_get_system_info_supervisor_not_available( "homeassistant.helpers.system_info.is_docker_env", return_value=True ), patch( "homeassistant.helpers.system_info.is_official_image", return_value=True - ), patch( - "homeassistant.components.hassio.is_hassio", return_value=True - ), patch( + ), patch("homeassistant.components.hassio.is_hassio", return_value=True), patch( "homeassistant.components.hassio.get_info", return_value=None - ), patch( - "homeassistant.helpers.system_info.cached_get_user", return_value="root" - ): + ), patch("homeassistant.helpers.system_info.cached_get_user", return_value="root"): info = await async_get_system_info(hass) assert isinstance(info, dict) assert info["version"] == current_version @@ -60,9 +56,7 @@ async def test_get_system_info_supervisor_not_loaded(hass: HomeAssistant) -> Non "homeassistant.helpers.system_info.is_docker_env", return_value=True ), patch( "homeassistant.helpers.system_info.is_official_image", return_value=True - ), patch( - "homeassistant.components.hassio.get_info", return_value=None - ), patch.dict( + ), patch("homeassistant.components.hassio.get_info", return_value=None), patch.dict( os.environ, {"SUPERVISOR": "127.0.0.1"} ): info = await async_get_system_info(hass) @@ -79,9 +73,7 @@ async def test_container_installationtype(hass: HomeAssistant) -> None: "homeassistant.helpers.system_info.is_docker_env", return_value=True ), patch( "homeassistant.helpers.system_info.is_official_image", return_value=True - ), patch( - "homeassistant.helpers.system_info.cached_get_user", return_value="root" - ): + 
), patch("homeassistant.helpers.system_info.cached_get_user", return_value="root"): info = await async_get_system_info(hass) assert info["installation_type"] == "Home Assistant Container" @@ -89,9 +81,7 @@ async def test_container_installationtype(hass: HomeAssistant) -> None: "homeassistant.helpers.system_info.is_docker_env", return_value=True ), patch( "homeassistant.helpers.system_info.is_official_image", return_value=False - ), patch( - "homeassistant.helpers.system_info.cached_get_user", return_value="user" - ): + ), patch("homeassistant.helpers.system_info.cached_get_user", return_value="user"): info = await async_get_system_info(hass) assert info["installation_type"] == "Unsupported Third Party Container" diff --git a/tests/snapshots/test_config.ambr b/tests/snapshots/test_config.ambr index e7afa47537a..7438bda5cde 100644 --- a/tests/snapshots/test_config.ambr +++ b/tests/snapshots/test_config.ambr @@ -1,66 +1,339 @@ # serializer version: 1 # name: test_component_config_validation_error[basic] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/basic/configuration.yaml, line 6).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. (See /fixtures/core/config/component_validation/basic/configuration.yaml, line 9).", - "Invalid config for [adr_0007_2]: required key not provided @ data['adr_0007_2']['host']. Got None. (See ?, line ?).", - "Invalid config for [adr_0007_3]: expected int for dictionary value @ data['adr_0007_3']['port']. Got 'foo'. 
(See /fixtures/core/config/component_validation/basic/configuration.yaml, line 20).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at configuration.yaml, line 6: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 9: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 12: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 18: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 19: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 20: expected str for dictionary value 'option2', got 123 + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_2' at configuration.yaml, line 27: required key 'host' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_3' at configuration.yaml, line 32: expected int for dictionary value 'adr_0007_3->port', got 'foo'", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_4' at configuration.yaml, line 37: 'no_such_option' is an invalid option for 'adr_0007_4', check: adr_0007_4->no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'adr_0007_5' at configuration.yaml, line 43: required key 'host' not provided + Invalid config for 'adr_0007_5' at configuration.yaml, line 44: 'no_such_option' is an invalid option for 'adr_0007_5', check: adr_0007_5->no_such_option + Invalid 
config for 'adr_0007_5' at configuration.yaml, line 45: expected int for dictionary value 'adr_0007_5->port', got 'foo' + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'custom_validator_ok_2' at configuration.yaml, line 52: required key 'host' not provided", + }), + dict({ + 'has_exc_info': True, + 'message': "Invalid config for 'custom_validator_bad_1' at configuration.yaml, line 55: broken", + }), + dict({ + 'has_exc_info': True, + 'message': 'Unknown error calling custom_validator_bad_2 config validator', + }), ]) # --- # name: test_component_config_validation_error[basic_include] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml, line 5).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. (See /fixtures/core/config/component_validation/basic_include/integrations/iot_domain.yaml, line 8).", - "Invalid config for [adr_0007_2]: required key not provided @ data['adr_0007_2']['host']. Got None. (See ?, line ?).", - "Invalid config for [adr_0007_3]: expected int for dictionary value @ data['adr_0007_3']['port']. Got 'foo'. 
(See /fixtures/core/config/component_validation/basic_include/configuration.yaml, line 4).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at integrations/iot_domain.yaml, line 5: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 8: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 11: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 17: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 18: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 19: expected str for dictionary value 'option2', got 123 + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_2' at configuration.yaml, line 3: required key 'host' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_3' at integrations/adr_0007_3.yaml, line 3: expected int for dictionary value 'adr_0007_3->port', got 'foo'", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_4' at integrations/adr_0007_4.yaml, line 3: 'no_such_option' is an invalid option for 'adr_0007_4', check: adr_0007_4->no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'adr_0007_5' at configuration.yaml, line 6: required key 'host' not provided + Invalid config for 'adr_0007_5' at integrations/adr_0007_5.yaml, line 5: 
'no_such_option' is an invalid option for 'adr_0007_5', check: adr_0007_5->no_such_option + Invalid config for 'adr_0007_5' at integrations/adr_0007_5.yaml, line 6: expected int for dictionary value 'adr_0007_5->port', got 'foo' + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'custom_validator_ok_2' at configuration.yaml, line 8: required key 'host' not provided", + }), + dict({ + 'has_exc_info': True, + 'message': "Invalid config for 'custom_validator_bad_1' at configuration.yaml, line 9: broken", + }), + dict({ + 'has_exc_info': True, + 'message': 'Unknown error calling custom_validator_bad_2 config validator', + }), ]) # --- # name: test_component_config_validation_error[include_dir_list] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_2.yaml, line 2).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. 
(See /fixtures/core/config/component_validation/include_dir_list/iot_domain/iot_domain_3.yaml, line 2).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at iot_domain/iot_domain_2.yaml, line 2: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_3.yaml, line 3: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_4.yaml, line 3: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_5.yaml, line 5: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_5.yaml, line 6: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_5.yaml, line 7: expected str for dictionary value 'option2', got 123 + ''', + }), ]) # --- # name: test_component_config_validation_error[include_dir_merge_list] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml, line 2).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. 
(See /fixtures/core/config/component_validation/include_dir_merge_list/iot_domain/iot_domain_2.yaml, line 5).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at iot_domain/iot_domain_1.yaml, line 5: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_2.yaml, line 3: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_2.yaml, line 6: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_2.yaml, line 12: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_2.yaml, line 13: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at iot_domain/iot_domain_2.yaml, line 14: expected str for dictionary value 'option2', got 123 + ''', + }), ]) # --- # name: test_component_config_validation_error[packages] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/packages/configuration.yaml, line 11).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. (See /fixtures/core/config/component_validation/packages/configuration.yaml, line 16).", - "Invalid config for [adr_0007_2]: required key not provided @ data['adr_0007_2']['host']. Got None. (See ?, line ?).", - "Invalid config for [adr_0007_3]: expected int for dictionary value @ data['adr_0007_3']['port']. Got 'foo'. 
(See ?, line ?).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at configuration.yaml, line 11: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 16: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 21: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 29: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 30: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 31: expected str for dictionary value 'option2', got 123 + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_2' at configuration.yaml, line 38: required key 'host' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_3' at configuration.yaml, line 43: expected int for dictionary value 'adr_0007_3->port', got 'foo'", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_4' at configuration.yaml, line 48: 'no_such_option' is an invalid option for 'adr_0007_4', check: adr_0007_4->no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'adr_0007_5' at configuration.yaml, line 54: required key 'host' not provided + Invalid config for 'adr_0007_5' at configuration.yaml, line 55: 'no_such_option' is an invalid option for 'adr_0007_5', check: adr_0007_5->no_such_option + Invalid config for 'adr_0007_5' at configuration.yaml, line 56: expected int 
for dictionary value 'adr_0007_5->port', got 'foo' + ''', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'custom_validator_ok_2' at configuration.yaml, line 64: required key 'host' not provided", + }), + dict({ + 'has_exc_info': True, + 'message': "Invalid config for 'custom_validator_bad_1' at configuration.yaml, line 67: broken", + }), + dict({ + 'has_exc_info': True, + 'message': 'Unknown error calling custom_validator_bad_2 config validator', + }), ]) # --- # name: test_component_config_validation_error[packages_include_dir_named] list([ - "Invalid config for [iot_domain.non_adr_0007]: expected str for dictionary value @ data['option1']. Got 123. (See /fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml, line 6).", - "Invalid config for [iot_domain]: required key not provided @ data['platform']. Got None. (See /fixtures/core/config/component_validation/packages_include_dir_named/integrations/iot_domain.yaml, line 9).", - "Invalid config for [adr_0007_2]: required key not provided @ data['adr_0007_2']['host']. Got None. (See ?, line ?).", - "Invalid config for [adr_0007_3]: expected int for dictionary value @ data['adr_0007_3']['port']. Got 'foo'. 
(See ?, line ?).", + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_2' at integrations/adr_0007_2.yaml, line 2: required key 'host' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_3' at integrations/adr_0007_3.yaml, line 4: expected int for dictionary value 'adr_0007_3->port', got 'foo'", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'adr_0007_4' at integrations/adr_0007_4.yaml, line 4: 'no_such_option' is an invalid option for 'adr_0007_4', check: adr_0007_4->no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'adr_0007_5' at integrations/adr_0007_5.yaml, line 5: required key 'host' not provided + Invalid config for 'adr_0007_5' at integrations/adr_0007_5.yaml, line 6: 'no_such_option' is an invalid option for 'adr_0007_5', check: adr_0007_5->no_such_option + Invalid config for 'adr_0007_5' at integrations/adr_0007_5.yaml, line 7: expected int for dictionary value 'adr_0007_5->port', got 'foo' + ''', + }), + dict({ + 'has_exc_info': True, + 'message': "Invalid config for 'custom_validator_bad_1' at integrations/custom_validator_bad_1.yaml, line 2: broken", + }), + dict({ + 'has_exc_info': True, + 'message': 'Unknown error calling custom_validator_bad_2 config validator', + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'custom_validator_ok_2' at integrations/custom_validator_ok_2.yaml, line 2: required key 'host' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain' at integrations/iot_domain.yaml, line 6: required key 'platform' not provided", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 9: expected str for dictionary value 'option1', got 123", + }), + dict({ + 'has_exc_info': False, + 'message': "Invalid config for 'iot_domain.non_adr_0007' at 
integrations/iot_domain.yaml, line 12: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option", + }), + dict({ + 'has_exc_info': False, + 'message': ''' + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 18: required key 'option1' not provided + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 19: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option + Invalid config for 'iot_domain.non_adr_0007' at integrations/iot_domain.yaml, line 20: expected str for dictionary value 'option2', got 123 + ''', + }), + ]) +# --- +# name: test_component_config_validation_error_with_docs[basic] + list([ + "Invalid config for 'iot_domain' at configuration.yaml, line 6: required key 'platform' not provided, please check the docs at https://www.home-assistant.io/integrations/iot_domain", + "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 9: expected str for dictionary value 'option1', got 123, please check the docs at https://www.home-assistant.io/integrations/non_adr_0007", + "Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 12: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option, please check the docs at https://www.home-assistant.io/integrations/non_adr_0007", + ''' + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 18: required key 'option1' not provided, please check the docs at https://www.home-assistant.io/integrations/non_adr_0007 + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 19: 'no_such_option' is an invalid option for 'iot_domain.non_adr_0007', check: no_such_option, please check the docs at https://www.home-assistant.io/integrations/non_adr_0007 + Invalid config for 'iot_domain.non_adr_0007' at configuration.yaml, line 20: expected str for dictionary value 'option2', got 123, please check the 
docs at https://www.home-assistant.io/integrations/non_adr_0007 + ''', + "Invalid config for 'adr_0007_2' at configuration.yaml, line 27: required key 'host' not provided, please check the docs at https://www.home-assistant.io/integrations/adr_0007_2", + "Invalid config for 'adr_0007_3' at configuration.yaml, line 32: expected int for dictionary value 'adr_0007_3->port', got 'foo', please check the docs at https://www.home-assistant.io/integrations/adr_0007_3", + "Invalid config for 'adr_0007_4' at configuration.yaml, line 37: 'no_such_option' is an invalid option for 'adr_0007_4', check: adr_0007_4->no_such_option, please check the docs at https://www.home-assistant.io/integrations/adr_0007_4", + ''' + Invalid config for 'adr_0007_5' at configuration.yaml, line 43: required key 'host' not provided, please check the docs at https://www.home-assistant.io/integrations/adr_0007_5 + Invalid config for 'adr_0007_5' at configuration.yaml, line 44: 'no_such_option' is an invalid option for 'adr_0007_5', check: adr_0007_5->no_such_option, please check the docs at https://www.home-assistant.io/integrations/adr_0007_5 + Invalid config for 'adr_0007_5' at configuration.yaml, line 45: expected int for dictionary value 'adr_0007_5->port', got 'foo', please check the docs at https://www.home-assistant.io/integrations/adr_0007_5 + ''', + "Invalid config for 'custom_validator_ok_2' at configuration.yaml, line 52: required key 'host' not provided, please check the docs at https://www.home-assistant.io/integrations/custom_validator_ok_2", + "Invalid config for 'custom_validator_bad_1' at configuration.yaml, line 55: broken, please check the docs at https://www.home-assistant.io/integrations/custom_validator_bad_1", + 'Unknown error calling custom_validator_bad_2 config validator', ]) # --- # name: test_package_merge_error[packages] list([ - 'Package pack_1 setup failed. Integration adr_0007_1 cannot be merged. Dict expected in main config. 
(See /fixtures/core/config/package_errors/packages/configuration.yaml:9).', - 'Package pack_2 setup failed. Integration adr_0007_2 cannot be merged. Expected a dict. (See /fixtures/core/config/package_errors/packages/configuration.yaml:13).', - "Package pack_4 setup failed. Integration adr_0007_3 has duplicate key 'host' (See /fixtures/core/config/package_errors/packages/configuration.yaml:20).", + "Setup of package 'pack_1' at configuration.yaml, line 7 failed: integration 'adr_0007_1' cannot be merged, dict expected in main config", + "Setup of package 'pack_2' at configuration.yaml, line 11 failed: integration 'adr_0007_2' cannot be merged, expected a dict", + "Setup of package 'pack_4' at configuration.yaml, line 19 failed: integration 'adr_0007_3' has duplicate key 'host'", + "Setup of package 'pack_5' at configuration.yaml, line 22 failed: Integration 'unknown_integration' not found.", ]) # --- # name: test_package_merge_error[packages_include_dir_named] list([ - 'Package adr_0007_1 setup failed. Integration adr_0007_1 cannot be merged. Dict expected in main config. (See /fixtures/core/config/package_errors/packages_include_dir_named/integrations/adr_0007_1.yaml:2).', - 'Package adr_0007_2 setup failed. Integration adr_0007_2 cannot be merged. Expected a dict. (See /fixtures/core/config/package_errors/packages_include_dir_named/integrations/adr_0007_2.yaml:2).', - "Package adr_0007_3_2 setup failed. 
Integration adr_0007_3 has duplicate key 'host' (See /fixtures/core/config/package_errors/packages_include_dir_named/integrations/adr_0007_3_2.yaml:1).", + "Setup of package 'adr_0007_1' at integrations/adr_0007_1.yaml, line 2 failed: integration 'adr_0007_1' cannot be merged, dict expected in main config", + "Setup of package 'adr_0007_2' at integrations/adr_0007_2.yaml, line 2 failed: integration 'adr_0007_2' cannot be merged, expected a dict", + "Setup of package 'adr_0007_3_2' at integrations/adr_0007_3_2.yaml, line 1 failed: integration 'adr_0007_3' has duplicate key 'host'", + "Setup of package 'unknown_integration' at integrations/unknown_integration.yaml, line 2 failed: Integration 'unknown_integration' not found.", + ]) +# --- +# name: test_package_merge_exception[packages-error0] + list([ + "Setup of package 'pack_1' at configuration.yaml, line 3 failed: Integration test_domain caused error: No such file or directory: b'liblibc.a'", + ]) +# --- +# name: test_package_merge_exception[packages-error1] + list([ + "Setup of package 'pack_1' at configuration.yaml, line 3 failed: Integration test_domain caused error: ModuleNotFoundError: No module named 'not_installed_something'", + ]) +# --- +# name: test_package_merge_exception[packages_include_dir_named-error0] + list([ + "Setup of package 'unknown_integration' at integrations/unknown_integration.yaml, line 1 failed: Integration test_domain caused error: No such file or directory: b'liblibc.a'", + ]) +# --- +# name: test_package_merge_exception[packages_include_dir_named-error1] + list([ + "Setup of package 'unknown_integration' at integrations/unknown_integration.yaml, line 1 failed: Integration test_domain caused error: ModuleNotFoundError: No module named 'not_installed_something'", ]) # --- # name: test_yaml_error[basic] ''' mapping values are not allowed here - in "/fixtures/core/config/yaml_errors/basic/configuration.yaml", line 4, column 14 + in "configuration.yaml", line 4, column 14 ''' # --- # name: 
test_yaml_error[basic].1 @@ -74,7 +347,7 @@ # name: test_yaml_error[basic_include] ''' mapping values are not allowed here - in "/fixtures/core/config/yaml_errors/basic_include/integrations/iot_domain.yaml", line 3, column 12 + in "integrations/iot_domain.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[basic_include].1 @@ -88,7 +361,7 @@ # name: test_yaml_error[include_dir_list] ''' mapping values are not allowed here - in "/fixtures/core/config/yaml_errors/include_dir_list/iot_domain/iot_domain_1.yaml", line 3, column 10 + in "iot_domain/iot_domain_1.yaml", line 3, column 10 ''' # --- # name: test_yaml_error[include_dir_list].1 @@ -102,7 +375,7 @@ # name: test_yaml_error[include_dir_merge_list] ''' mapping values are not allowed here - in "/fixtures/core/config/yaml_errors/include_dir_merge_list/iot_domain/iot_domain_1.yaml", line 3, column 12 + in "iot_domain/iot_domain_1.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[include_dir_merge_list].1 @@ -116,7 +389,7 @@ # name: test_yaml_error[packages_include_dir_named] ''' mapping values are not allowed here - in "/fixtures/core/config/yaml_errors/packages_include_dir_named/integrations/adr_0007_1.yaml", line 4, column 9 + in "integrations/adr_0007_1.yaml", line 4, column 9 ''' # --- # name: test_yaml_error[packages_include_dir_named].1 diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 555bcbdf6b2..42d679d7ce6 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -23,8 +23,8 @@ from .common import ( MockModule, MockPlatform, get_test_config_dir, - mock_entity_platform, mock_integration, + mock_platform, ) VERSION_PATH = os.path.join(get_test_config_dir(), config_util.VERSION_FILE) @@ -40,7 +40,7 @@ async def apply_stop_hass(stop_hass: None) -> None: """Make sure all hass are stopped.""" -@pytest.fixture(autouse=True) +@pytest.fixture(scope="session", autouse=True) def mock_http_start_stop() -> Generator[None, None, None]: """Mock HTTP start and stop.""" with 
patch( @@ -327,7 +327,7 @@ async def test_setup_after_deps_via_platform(hass: HomeAssistant) -> None: partial_manifest={"after_dependencies": ["after_dep_of_platform_int"]}, ), ) - mock_entity_platform(hass, "light.platform_int", MockPlatform()) + mock_platform(hass, "platform_int.light", MockPlatform()) @callback def continue_loading(_): @@ -719,17 +719,19 @@ async def test_setup_hass_invalid_core_config( event_loop: asyncio.AbstractEventLoop, ) -> None: """Test it works.""" - hass = await bootstrap.async_setup_hass( - runner.RuntimeConfig( - config_dir=get_test_config_dir(), - verbose=False, - log_rotate_days=10, - log_file="", - log_no_color=False, - skip_pip=True, - recovery_mode=False, - ), - ) + with patch("homeassistant.bootstrap.async_notify_setup_error") as mock_notify: + hass = await bootstrap.async_setup_hass( + runner.RuntimeConfig( + config_dir=get_test_config_dir(), + verbose=False, + log_rotate_days=10, + log_file="", + log_no_color=False, + skip_pip=True, + recovery_mode=False, + ), + ) + assert len(mock_notify.mock_calls) == 1 assert "recovery_mode" in hass.config.components @@ -1011,7 +1013,10 @@ async def test_bootstrap_dependencies( with patch( "homeassistant.setup.loader.async_get_integrations", side_effect=mock_async_get_integrations, - ), patch("homeassistant.config.async_process_component_config", return_value={}): + ), patch( + "homeassistant.config.async_process_component_config", + return_value=config_util.IntegrationConfigInfo({}, []), + ): bootstrap.async_set_domains_to_be_loaded(hass, {integration}) await bootstrap.async_setup_multi_components(hass, {integration}, {}) await hass.async_block_till_done() diff --git a/tests/test_config.py b/tests/test_config.py index d97d4f7a2c8..de5e7e0581d 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -30,9 +30,11 @@ from homeassistant.const import ( __version__, ) from homeassistant.core import ConfigSource, HomeAssistant, HomeAssistantError +from homeassistant.exceptions import 
ConfigValidationError from homeassistant.helpers import config_validation as cv, issue_registry as ir import homeassistant.helpers.check_config as check_config from homeassistant.helpers.entity import Entity +from homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration, async_get_integration from homeassistant.util.unit_system import ( _CONF_UNIT_SYSTEM_US_CUSTOMARY, @@ -114,14 +116,35 @@ async def mock_iot_domain_integration(hass: HomeAssistant) -> Integration: @pytest.fixture -async def mock_non_adr_0007_integration(hass) -> None: +async def mock_iot_domain_integration_with_docs(hass: HomeAssistant) -> Integration: + """Mock an integration which provides an IoT domain.""" + comp_platform_schema = cv.PLATFORM_SCHEMA.extend({vol.Remove("old"): str}) + comp_platform_schema_base = comp_platform_schema.extend({}, extra=vol.ALLOW_EXTRA) + + return mock_integration( + hass, + MockModule( + "iot_domain", + platform_schema_base=comp_platform_schema_base, + platform_schema=comp_platform_schema, + partial_manifest={ + "documentation": "https://www.home-assistant.io/integrations/iot_domain" + }, + ), + ) + + +@pytest.fixture +async def mock_non_adr_0007_integration(hass: HomeAssistant) -> None: """Mock a non-ADR-0007 compliant integration with iot_domain platform. The integration allows setting up iot_domain entities under the iot_domain's configuration key """ - test_platform_schema = IOT_DOMAIN_PLATFORM_SCHEMA.extend({"option1": str}) + test_platform_schema = IOT_DOMAIN_PLATFORM_SCHEMA.extend( + {vol.Required("option1"): str, vol.Optional("option2"): str} + ) mock_platform( hass, "non_adr_0007.iot_domain", @@ -130,16 +153,49 @@ async def mock_non_adr_0007_integration(hass) -> None: @pytest.fixture -async def mock_adr_0007_integrations(hass) -> list[Integration]: +async def mock_non_adr_0007_integration_with_docs(hass: HomeAssistant) -> None: + """Mock a non-ADR-0007 compliant integration with iot_domain platform. 
+ + The integration allows setting up iot_domain entities under the iot_domain's + configuration key + """ + + mock_integration( + hass, + MockModule( + "non_adr_0007", + partial_manifest={ + "documentation": "https://www.home-assistant.io/integrations/non_adr_0007" + }, + ), + ) + test_platform_schema = IOT_DOMAIN_PLATFORM_SCHEMA.extend( + {vol.Required("option1"): str, vol.Optional("option2"): str} + ) + mock_platform( + hass, + "non_adr_0007.iot_domain", + MockPlatform(platform_schema=test_platform_schema), + ) + + +@pytest.fixture +async def mock_adr_0007_integrations(hass: HomeAssistant) -> list[Integration]: """Mock ADR-0007 compliant integrations.""" integrations = [] - for domain in ["adr_0007_1", "adr_0007_2", "adr_0007_3"]: + for domain in [ + "adr_0007_1", + "adr_0007_2", + "adr_0007_3", + "adr_0007_4", + "adr_0007_5", + ]: adr_0007_config_schema = vol.Schema( { domain: vol.Schema( { vol.Required("host"): str, - vol.Required("port", default=8080): int, + vol.Optional("port", default=8080): int, } ) }, @@ -154,6 +210,161 @@ async def mock_adr_0007_integrations(hass) -> list[Integration]: return integrations +@pytest.fixture +async def mock_adr_0007_integrations_with_docs( + hass: HomeAssistant, +) -> list[Integration]: + """Mock ADR-0007 compliant integrations.""" + integrations = [] + for domain in [ + "adr_0007_1", + "adr_0007_2", + "adr_0007_3", + "adr_0007_4", + "adr_0007_5", + ]: + adr_0007_config_schema = vol.Schema( + { + domain: vol.Schema( + { + vol.Required("host"): str, + vol.Optional("port", default=8080): int, + } + ) + }, + extra=vol.ALLOW_EXTRA, + ) + integrations.append( + mock_integration( + hass, + MockModule( + domain, + config_schema=adr_0007_config_schema, + partial_manifest={ + "documentation": f"https://www.home-assistant.io/integrations/{domain}" + }, + ), + ) + ) + return integrations + + +@pytest.fixture +async def mock_custom_validator_integrations(hass: HomeAssistant) -> list[Integration]: + """Mock integrations with custom 
validator.""" + integrations = [] + + for domain in ("custom_validator_ok_1", "custom_validator_ok_2"): + + def gen_async_validate_config(domain): + schema = vol.Schema( + { + domain: vol.Schema( + { + vol.Required("host"): str, + vol.Optional("port", default=8080): int, + } + ) + }, + extra=vol.ALLOW_EXTRA, + ) + + async def async_validate_config( + hass: HomeAssistant, config: ConfigType + ) -> ConfigType: + """Validate config.""" + return schema(config) + + return async_validate_config + + integrations.append(mock_integration(hass, MockModule(domain))) + mock_platform( + hass, + f"{domain}.config", + Mock(async_validate_config=gen_async_validate_config(domain)), + ) + + for domain, exception in [ + ("custom_validator_bad_1", HomeAssistantError("broken")), + ("custom_validator_bad_2", ValueError("broken")), + ]: + integrations.append(mock_integration(hass, MockModule(domain))) + mock_platform( + hass, + f"{domain}.config", + Mock(async_validate_config=AsyncMock(side_effect=exception)), + ) + + +@pytest.fixture +async def mock_custom_validator_integrations_with_docs( + hass: HomeAssistant, +) -> list[Integration]: + """Mock integrations with custom validator.""" + integrations = [] + + for domain in ("custom_validator_ok_1", "custom_validator_ok_2"): + + def gen_async_validate_config(domain): + schema = vol.Schema( + { + domain: vol.Schema( + { + vol.Required("host"): str, + vol.Optional("port", default=8080): int, + } + ) + }, + extra=vol.ALLOW_EXTRA, + ) + + async def async_validate_config( + hass: HomeAssistant, config: ConfigType + ) -> ConfigType: + """Validate config.""" + return schema(config) + + return async_validate_config + + integrations.append( + mock_integration( + hass, + MockModule( + domain, + partial_manifest={ + "documentation": f"https://www.home-assistant.io/integrations/{domain}" + }, + ), + ) + ) + mock_platform( + hass, + f"{domain}.config", + Mock(async_validate_config=gen_async_validate_config(domain)), + ) + + for domain, exception in [ 
+ ("custom_validator_bad_1", HomeAssistantError("broken")), + ("custom_validator_bad_2", ValueError("broken")), + ]: + integrations.append( + mock_integration( + hass, + MockModule( + domain, + partial_manifest={ + "documentation": f"https://www.home-assistant.io/integrations/{domain}" + }, + ), + ) + ) + mock_platform( + hass, + f"{domain}.config", + Mock(async_validate_config=AsyncMock(side_effect=exception)), + ) + + async def test_create_default_config(hass: HomeAssistant) -> None: """Test creation of default config.""" assert not os.path.isfile(YAML_PATH) @@ -1217,71 +1428,132 @@ async def test_component_config_exceptions( ) -> None: """Test unexpected exceptions validating component config.""" # Config validator + test_integration = Mock( + domain="test_domain", + get_platform=Mock( + return_value=Mock( + async_validate_config=AsyncMock(side_effect=ValueError("broken")) + ) + ), + ) assert ( - await config_util.async_process_component_config( - hass, - {}, - integration=Mock( - domain="test_domain", - get_platform=Mock( - return_value=Mock( - async_validate_config=AsyncMock( - side_effect=ValueError("broken") - ) - ) - ), - ), + await config_util.async_process_component_and_handle_errors( + hass, {}, integration=test_integration ) is None ) assert "ValueError: broken" in caplog.text assert "Unknown error calling test_domain config validator" in caplog.text + caplog.clear() + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, {}, integration=test_integration, raise_on_failure=True + ) + assert "ValueError: broken" in caplog.text + assert "Unknown error calling test_domain config validator" in caplog.text + assert str(ex.value) == "Unknown error calling test_domain config validator" - # component.CONFIG_SCHEMA + test_integration = Mock( + domain="test_domain", + get_platform=Mock( + return_value=Mock( + async_validate_config=AsyncMock( + side_effect=HomeAssistantError("broken") + ) + ) + ), + 
get_component=Mock(return_value=Mock(spec=["PLATFORM_SCHEMA_BASE"])), + ) caplog.clear() assert ( - await config_util.async_process_component_config( - hass, - {}, - integration=Mock( - domain="test_domain", - get_platform=Mock(return_value=None), - get_component=Mock( - return_value=Mock( - CONFIG_SCHEMA=Mock(side_effect=ValueError("broken")) - ) - ), - ), + await config_util.async_process_component_and_handle_errors( + hass, {}, integration=test_integration, raise_on_failure=False + ) + is None + ) + assert "Invalid config for 'test_domain': broken" in caplog.text + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, {}, integration=test_integration, raise_on_failure=True + ) + assert "Invalid config for 'test_domain': broken" in str(ex.value) + + # component.CONFIG_SCHEMA + caplog.clear() + test_integration = Mock( + domain="test_domain", + get_platform=Mock(return_value=None), + get_component=Mock( + return_value=Mock(CONFIG_SCHEMA=Mock(side_effect=ValueError("broken"))) + ), + ) + assert ( + await config_util.async_process_component_and_handle_errors( + hass, + {}, + integration=test_integration, + raise_on_failure=False, ) is None ) - assert "ValueError: broken" in caplog.text assert "Unknown error calling test_domain CONFIG_SCHEMA" in caplog.text + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, + {}, + integration=test_integration, + raise_on_failure=True, + ) + assert "Unknown error calling test_domain CONFIG_SCHEMA" in caplog.text + assert str(ex.value) == "Unknown error calling test_domain CONFIG_SCHEMA" # component.PLATFORM_SCHEMA caplog.clear() - assert await config_util.async_process_component_config( + test_integration = Mock( + domain="test_domain", + get_platform=Mock(return_value=None), + get_component=Mock( + return_value=Mock( + spec=["PLATFORM_SCHEMA_BASE"], + 
PLATFORM_SCHEMA_BASE=Mock(side_effect=ValueError("broken")), + ) + ), + ) + assert await config_util.async_process_component_and_handle_errors( hass, {"test_domain": {"platform": "test_platform"}}, - integration=Mock( - domain="test_domain", - get_platform=Mock(return_value=None), - get_component=Mock( - return_value=Mock( - spec=["PLATFORM_SCHEMA_BASE"], - PLATFORM_SCHEMA_BASE=Mock(side_effect=ValueError("broken")), - ) - ), - ), + integration=test_integration, + raise_on_failure=False, ) == {"test_domain": []} assert "ValueError: broken" in caplog.text assert ( - "Unknown error validating test_platform platform config " - "with test_domain component platform schema" + "Unknown error validating config for test_platform platform " + "for test_domain component with PLATFORM_SCHEMA" ) in caplog.text + caplog.clear() + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {"platform": "test_platform"}}, + integration=test_integration, + raise_on_failure=True, + ) + assert ( + "Unknown error validating config for test_platform platform " + "for test_domain component with PLATFORM_SCHEMA" + ) in caplog.text + assert str(ex.value) == ( + "Unknown error validating config for test_platform platform " + "for test_domain component with PLATFORM_SCHEMA" + ) # platform.PLATFORM_SCHEMA caplog.clear() + test_integration = Mock( + domain="test_domain", + get_platform=Mock(return_value=None), + get_component=Mock(return_value=Mock(spec=["PLATFORM_SCHEMA_BASE"])), + ) with patch( "homeassistant.config.async_get_integration_with_requirements", return_value=Mock( # integration that owns platform @@ -1292,67 +1564,337 @@ async def test_component_config_exceptions( ) ), ): - assert await config_util.async_process_component_config( + assert await config_util.async_process_component_and_handle_errors( hass, {"test_domain": {"platform": "test_platform"}}, - integration=Mock( - domain="test_domain", - 
get_platform=Mock(return_value=None), - get_component=Mock(return_value=Mock(spec=["PLATFORM_SCHEMA_BASE"])), - ), + integration=test_integration, + raise_on_failure=False, ) == {"test_domain": []} assert "ValueError: broken" in caplog.text + assert ( + "Unknown error validating config for test_platform platform for test_domain" + " component with PLATFORM_SCHEMA" + ) in caplog.text + caplog.clear() + with pytest.raises(HomeAssistantError) as ex: + assert await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {"platform": "test_platform"}}, + integration=test_integration, + raise_on_failure=True, + ) + assert ( + "Unknown error validating config for test_platform platform for test_domain" + " component with PLATFORM_SCHEMA" + ) in str(ex.value) + assert "ValueError: broken" in caplog.text assert ( "Unknown error validating config for test_platform platform for test_domain" " component with PLATFORM_SCHEMA" in caplog.text ) + # Test multiple platform failures + assert await config_util.async_process_component_and_handle_errors( + hass, + { + "test_domain": [ + {"platform": "test_platform1"}, + {"platform": "test_platform2"}, + ] + }, + integration=test_integration, + raise_on_failure=False, + ) == {"test_domain": []} + assert "ValueError: broken" in caplog.text + assert ( + "Unknown error validating config for test_platform1 platform " + "for test_domain component with PLATFORM_SCHEMA" + ) in caplog.text + assert ( + "Unknown error validating config for test_platform2 platform " + "for test_domain component with PLATFORM_SCHEMA" + ) in caplog.text + caplog.clear() + with pytest.raises(HomeAssistantError) as ex: + assert await config_util.async_process_component_and_handle_errors( + hass, + { + "test_domain": [ + {"platform": "test_platform1"}, + {"platform": "test_platform2"}, + ] + }, + integration=test_integration, + raise_on_failure=True, + ) + assert ( + "Failed to process component config for integration test_domain" + " due to 
multiple errors (2), check the logs for more information." + ) in str(ex.value) + assert "ValueError: broken" in caplog.text + assert ( + "Unknown error validating config for test_platform1 platform " + "for test_domain component with PLATFORM_SCHEMA" + ) in caplog.text + assert ( + "Unknown error validating config for test_platform2 platform " + "for test_domain component with PLATFORM_SCHEMA" + ) in caplog.text + + # get_platform("domain") raising on ImportError + caplog.clear() + test_integration = Mock( + domain="test_domain", + get_platform=Mock(return_value=None), + get_component=Mock(return_value=Mock(spec=["PLATFORM_SCHEMA_BASE"])), + ) + import_error = ImportError( + ("ModuleNotFoundError: No module named 'not_installed_something'"), + name="not_installed_something", + ) + with patch( + "homeassistant.config.async_get_integration_with_requirements", + return_value=Mock( # integration that owns platform + get_platform=Mock(side_effect=import_error) + ), + ): + assert await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {"platform": "test_platform"}}, + integration=test_integration, + raise_on_failure=False, + ) == {"test_domain": []} + assert ( + "ImportError: ModuleNotFoundError: No module named " + "'not_installed_something'" in caplog.text + ) + caplog.clear() + with pytest.raises(HomeAssistantError) as ex: + assert await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {"platform": "test_platform"}}, + integration=test_integration, + raise_on_failure=True, + ) + assert ( + "ImportError: ModuleNotFoundError: No module named " + "'not_installed_something'" in caplog.text + ) + assert ( + "Platform error: test_domain - ModuleNotFoundError: " + "No module named 'not_installed_something'" + ) in caplog.text + assert ( + "Platform error: test_domain - ModuleNotFoundError: " + "No module named 'not_installed_something'" + ) in str(ex.value) # get_platform("config") raising caplog.clear() + 
test_integration = Mock( + pkg_path="homeassistant.components.test_domain", + domain="test_domain", + get_platform=Mock( + side_effect=ImportError( + ("ModuleNotFoundError: No module named 'not_installed_something'"), + name="not_installed_something", + ) + ), + ) assert ( - await config_util.async_process_component_config( + await config_util.async_process_component_and_handle_errors( hass, {"test_domain": {}}, - integration=Mock( - pkg_path="homeassistant.components.test_domain", - domain="test_domain", - get_platform=Mock( - side_effect=ImportError( - ( - "ModuleNotFoundError: No module named" - " 'not_installed_something'" - ), - name="not_installed_something", - ) - ), - ), + integration=test_integration, + raise_on_failure=False, ) is None ) assert ( - "Error importing config platform test_domain: ModuleNotFoundError: No module" - " named 'not_installed_something'" in caplog.text + "Error importing config platform test_domain: ModuleNotFoundError: " + "No module named 'not_installed_something'" in caplog.text + ) + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {}}, + integration=test_integration, + raise_on_failure=True, + ) + assert ( + "Error importing config platform test_domain: ModuleNotFoundError: " + "No module named 'not_installed_something'" in caplog.text + ) + assert ( + "Error importing config platform test_domain: ModuleNotFoundError: " + "No module named 'not_installed_something'" in str(ex.value) ) # get_component raising caplog.clear() + test_integration = Mock( + pkg_path="homeassistant.components.test_domain", + domain="test_domain", + get_component=Mock( + side_effect=FileNotFoundError("No such file or directory: b'liblibc.a'") + ), + ) assert ( - await config_util.async_process_component_config( + await config_util.async_process_component_and_handle_errors( hass, {"test_domain": {}}, - integration=Mock( - 
pkg_path="homeassistant.components.test_domain", - domain="test_domain", - get_component=Mock( - side_effect=FileNotFoundError( - "No such file or directory: b'liblibc.a'" - ) - ), - ), + integration=test_integration, + raise_on_failure=False, ) is None ) assert "Unable to import test_domain: No such file or directory" in caplog.text + with pytest.raises(HomeAssistantError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, + {"test_domain": {}}, + integration=test_integration, + raise_on_failure=True, + ) + assert "Unable to import test_domain: No such file or directory" in caplog.text + assert "Unable to import test_domain: No such file or directory" in str(ex.value) + + +@pytest.mark.parametrize( + ("exception_info_list", "error", "messages", "show_stack_trace", "translation_key"), + [ + ( + [ + config_util.ConfigExceptionInfo( + ImportError("bla"), + "component_import_err", + "test_domain", + {"test_domain": []}, + "https://example.com", + ) + ], + "bla", + ["Unable to import test_domain: bla", "bla"], + False, + "component_import_err", + ), + ( + [ + config_util.ConfigExceptionInfo( + HomeAssistantError("bla"), + "config_validation_err", + "test_domain", + {"test_domain": []}, + "https://example.com", + ) + ], + "bla", + [ + "Invalid config for 'test_domain': bla, " + "please check the docs at https://example.com", + "bla", + ], + True, + "config_validation_err", + ), + ( + [ + config_util.ConfigExceptionInfo( + vol.Invalid("bla", ["path"]), + "config_validation_err", + "test_domain", + {"test_domain": []}, + "https://example.com", + ) + ], + "bla @ data['path']", + [ + "Invalid config for 'test_domain': bla 'path', got None, " + "please check the docs at https://example.com", + "bla", + ], + False, + "config_validation_err", + ), + ( + [ + config_util.ConfigExceptionInfo( + vol.Invalid("bla", ["path"]), + "platform_config_validation_err", + "test_domain", + {"test_domain": []}, + "https://alt.example.com", + ) + ], + "bla @ 
data['path']", + [ + "Invalid config for 'test_domain': bla 'path', got None, " + "please check the docs at https://alt.example.com", + "bla", + ], + False, + "platform_config_validation_err", + ), + ( + [ + config_util.ConfigExceptionInfo( + ImportError("bla"), + "platform_component_load_err", + "test_domain", + {"test_domain": []}, + "https://example.com", + ) + ], + "bla", + ["Platform error: test_domain - bla", "bla"], + False, + "platform_component_load_err", + ), + ], +) +async def test_component_config_error_processing( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + error: str, + exception_info_list: list[config_util.ConfigExceptionInfo], + messages: list[str], + show_stack_trace: bool, + translation_key: str, +) -> None: + """Test component config error processing.""" + test_integration = Mock( + domain="test_domain", + documentation="https://example.com", + get_platform=Mock( + return_value=Mock( + async_validate_config=AsyncMock(side_effect=ValueError("broken")) + ) + ), + ) + with patch( + "homeassistant.config.async_process_component_config", + return_value=config_util.IntegrationConfigInfo(None, exception_info_list), + ), pytest.raises(ConfigValidationError) as ex: + await config_util.async_process_component_and_handle_errors( + hass, {}, test_integration, raise_on_failure=True + ) + records = [record for record in caplog.records if record.msg == messages[0]] + assert len(records) == 1 + assert (records[0].exc_info is not None) == show_stack_trace + assert str(ex.value) == messages[0] + assert ex.value.translation_key == translation_key + assert ex.value.translation_domain == "homeassistant" + assert ex.value.translation_placeholders["domain"] == "test_domain" + assert all(message in caplog.text for message in messages) + + caplog.clear() + with patch( + "homeassistant.config.async_process_component_config", + return_value=config_util.IntegrationConfigInfo(None, exception_info_list), + ): + await 
config_util.async_process_component_and_handle_errors( + hass, {}, test_integration + ) + assert all(message in caplog.text for message in messages) @pytest.mark.parametrize( @@ -1488,6 +2030,7 @@ async def test_component_config_validation_error( mock_iot_domain_integration: Integration, mock_non_adr_0007_integration: None, mock_adr_0007_integrations: list[Integration], + mock_custom_validator_integrations: list[Integration], snapshot: SnapshotAssertion, ) -> None: """Test schema error in component.""" @@ -1498,16 +2041,63 @@ async def test_component_config_validation_error( ) config = await config_util.async_hass_config_yaml(hass) - for domain in ["iot_domain", "adr_0007_1", "adr_0007_2", "adr_0007_3"]: - integration = await async_get_integration(hass, domain) - await config_util.async_process_component_config( + for domain_with_label in config: + integration = await async_get_integration( + hass, domain_with_label.partition(" ")[0] + ) + await config_util.async_process_component_and_handle_errors( hass, config, integration=integration, ) error_records = [ - record.message.replace(base_path, "") + { + "message": record.message, + "has_exc_info": bool(record.exc_info), + } + for record in caplog.get_records("call") + if record.levelno == logging.ERROR + ] + assert error_records == snapshot + + +@pytest.mark.parametrize( + "config_dir", + [ + "basic", + ], +) +async def test_component_config_validation_error_with_docs( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + config_dir: str, + mock_iot_domain_integration_with_docs: Integration, + mock_non_adr_0007_integration_with_docs: None, + mock_adr_0007_integrations_with_docs: list[Integration], + mock_custom_validator_integrations_with_docs: list[Integration], + snapshot: SnapshotAssertion, +) -> None: + """Test schema error in component.""" + + base_path = os.path.dirname(__file__) + hass.config.config_dir = os.path.join( + base_path, "fixtures", "core", "config", "component_validation", config_dir + ) + 
config = await config_util.async_hass_config_yaml(hass) + + for domain_with_label in config: + integration = await async_get_integration( + hass, domain_with_label.partition(" ")[0] + ) + await config_util.async_process_component_and_handle_errors( + hass, + config, + integration=integration, + ) + + error_records = [ + record.message for record in caplog.get_records("call") if record.levelno == logging.ERROR ] @@ -1535,7 +2125,47 @@ async def test_package_merge_error( await config_util.async_hass_config_yaml(hass) error_records = [ - record.message.replace(base_path, "") + record.message + for record in caplog.get_records("call") + if record.levelno == logging.ERROR + ] + assert error_records == snapshot + + +@pytest.mark.parametrize( + "error", + [ + FileNotFoundError("No such file or directory: b'liblibc.a'"), + ImportError( + ("ModuleNotFoundError: No module named 'not_installed_something'"), + name="not_installed_something", + ), + ], +) +@pytest.mark.parametrize( + "config_dir", + ["packages", "packages_include_dir_named"], +) +async def test_package_merge_exception( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + config_dir: str, + error: Exception, + snapshot: SnapshotAssertion, +) -> None: + """Test exception when merging packages.""" + base_path = os.path.dirname(__file__) + hass.config.config_dir = os.path.join( + base_path, "fixtures", "core", "config", "package_exceptions", config_dir + ) + with patch( + "homeassistant.config.async_get_integration_with_requirements", + side_effect=error, + ): + await config_util.async_hass_config_yaml(hass) + + error_records = [ + record.message for record in caplog.get_records("call") if record.levelno == logging.ERROR ] diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index a3c052971e3..f63972c79e8 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -40,8 +40,8 @@ from .common import ( MockPlatform, async_fire_time_changed, mock_config_flow, - 
mock_entity_platform, mock_integration, + mock_platform, ) from tests.common import async_get_persistent_notifications @@ -92,7 +92,7 @@ async def test_call_setup_entry(hass: HomeAssistant) -> None: async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) with patch("homeassistant.config_entries.support_entry_unload", return_value=True): result = await async_setup_component(hass, "comp", {}) @@ -121,7 +121,7 @@ async def test_call_setup_entry_without_reload_support(hass: HomeAssistant) -> N async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) with patch("homeassistant.config_entries.support_entry_unload", return_value=False): result = await async_setup_component(hass, "comp", {}) @@ -151,7 +151,7 @@ async def test_call_async_migrate_entry(hass: HomeAssistant) -> None: async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) with patch("homeassistant.config_entries.support_entry_unload", return_value=True): result = await async_setup_component(hass, "comp", {}) @@ -181,7 +181,7 @@ async def test_call_async_migrate_entry_failure_false(hass: HomeAssistant) -> No async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) result = await async_setup_component(hass, "comp", {}) assert result @@ -209,7 +209,7 @@ async def test_call_async_migrate_entry_failure_exception(hass: HomeAssistant) - async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) result = await async_setup_component(hass, "comp", {}) assert result @@ -237,7 +237,7 @@ async def test_call_async_migrate_entry_failure_not_bool(hass: HomeAssistant) -> 
async_migrate_entry=mock_migrate_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) result = await async_setup_component(hass, "comp", {}) assert result @@ -259,7 +259,7 @@ async def test_call_async_migrate_entry_failure_not_supported( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) result = await async_setup_component(hass, "comp", {}) assert result @@ -311,10 +311,10 @@ async def test_remove_entry( async_remove_entry=mock_remove_entry, ), ) - mock_entity_platform( - hass, "light.test", MockPlatform(async_setup_entry=mock_setup_entry_platform) + mock_platform( + hass, "test.light", MockPlatform(async_setup_entry=mock_setup_entry_platform) ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) MockConfigEntry(domain="test_other", entry_id="test1").add_to_manager(manager) entry = MockConfigEntry(domain="test", entry_id="test2") @@ -371,7 +371,7 @@ async def test_remove_entry_cancels_reauth( mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed()) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) entry.add_to_hass(hass) await entry.async_setup(hass) @@ -510,7 +510,7 @@ async def test_add_entry_calls_setup_entry( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -610,7 +610,7 @@ async def test_saving_and_loading(hass: HomeAssistant) -> None: "test", async_setup_entry=lambda *args: 
AsyncMock(return_value=True) ), ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -721,7 +721,7 @@ async def test_discovery_notification( ) -> None: """Test that we create/dismiss a notification when source is discovery.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch.dict(config_entries.HANDLERS): @@ -775,7 +775,7 @@ async def test_discovery_notification( async def test_reauth_notification(hass: HomeAssistant) -> None: """Test that we create/dismiss a notification when source is reauth.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch.dict(config_entries.HANDLERS): @@ -842,7 +842,7 @@ async def test_reauth_notification(hass: HomeAssistant) -> None: async def test_discovery_notification_not_created(hass: HomeAssistant) -> None: """Test that we not create a notification when discovery is aborted.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -940,7 +940,7 @@ async def test_setup_raise_not_ready( side_effect=ConfigEntryNotReady("The internet connection is offline") ) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch("homeassistant.config_entries.async_call_later") as mock_call: await entry.async_setup(hass) @@ -978,7 +978,7 @@ async def test_setup_raise_not_ready_from_exception( mock_setup_entry = AsyncMock(side_effect=config_entry_exception) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - 
mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch("homeassistant.config_entries.async_call_later") as mock_call: await entry.async_setup(hass) @@ -996,7 +996,7 @@ async def test_setup_retrying_during_unload(hass: HomeAssistant) -> None: mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch("homeassistant.config_entries.async_call_later") as mock_call: await entry.async_setup(hass) @@ -1018,7 +1018,7 @@ async def test_setup_retrying_during_unload_before_started(hass: HomeAssistant) mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -1043,7 +1043,7 @@ async def test_setup_does_not_retry_during_shutdown(hass: HomeAssistant) -> None mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) @@ -1081,7 +1081,7 @@ async def test_create_entry_options( "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1114,7 +1114,7 @@ async def test_entry_options( ) -> None: """Test that we can set options on an entry.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) entry 
= MockConfigEntry(domain="test", data={"first": True}, options=None) entry.add_to_manager(manager) @@ -1152,7 +1152,7 @@ async def test_entry_options_abort( ) -> None: """Test that we can abort options flow.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) entry = MockConfigEntry(domain="test", data={"first": True}, options=None) entry.add_to_manager(manager) @@ -1186,7 +1186,7 @@ async def test_entry_options_unknown_config_entry( ) -> None: """Test that we can abort options flow.""" mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) class TestFlow: """Test flow.""" @@ -1218,7 +1218,7 @@ async def test_entry_setup_succeed( hass, MockModule("comp", async_setup=mock_setup, async_setup_entry=mock_setup_entry), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) assert await manager.async_setup(entry.entry_id) assert len(mock_setup.mock_calls) == 1 @@ -1350,7 +1350,7 @@ async def test_entry_reload_succeed( async_unload_entry=async_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) assert await manager.async_reload(entry.entry_id) assert len(async_unload_entry.mock_calls) == 1 @@ -1389,7 +1389,7 @@ async def test_entry_reload_not_loaded( async_unload_entry=async_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) assert await manager.async_reload(entry.entry_id) assert len(async_unload_entry.mock_calls) == 0 @@ -1458,7 +1458,7 @@ async def test_entry_disable_succeed( async_unload_entry=async_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) # Disable assert await manager.async_set_disabled_by( @@ -1495,7 +1495,7 @@ 
async def test_entry_disable_without_reload_support( async_setup_entry=async_setup_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) # Disable assert not await manager.async_set_disabled_by( @@ -1536,7 +1536,7 @@ async def test_entry_enable_without_reload_support( async_setup_entry=async_setup_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) # Enable assert await manager.async_set_disabled_by(entry.entry_id, None) @@ -1582,7 +1582,7 @@ async def test_init_custom_integration_with_missing_handler( hass, MockModule("hue"), ) - mock_entity_platform(hass, "config_flow.hue", None) + mock_platform(hass, "hue.config_flow", None) with pytest.raises(data_entry_flow.UnknownHandler), patch( "homeassistant.loader.async_get_integration", return_value=integration, @@ -1634,7 +1634,7 @@ async def test_reload_entry_entity_registry_works( async_unload_entry=mock_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) # Only changing disabled_by should update trigger entity_entry = entity_registry.async_get_or_create( @@ -1676,7 +1676,7 @@ async def test_unique_id_persisted( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1724,7 +1724,7 @@ async def test_unique_id_existing_entry( async_remove_entry=async_remove_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1772,7 +1772,7 @@ async def test_entry_id_existing_entry( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, 
"comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1811,7 +1811,7 @@ async def test_unique_id_update_existing_entry_without_reload( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1857,7 +1857,7 @@ async def test_unique_id_update_existing_entry_with_reload( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) updates = {"host": "1.1.1.1"} class TestFlow(config_entries.ConfigFlow): @@ -1923,7 +1923,7 @@ async def test_unique_id_from_discovery_in_setup_retry( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -1991,7 +1991,7 @@ async def test_unique_id_not_update_existing_entry( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2025,7 +2025,7 @@ async def test_unique_id_in_progress( ) -> None: """Test that we abort if there is already a flow in progress with same unique id.""" mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2061,7 +2061,7 @@ async def test_finish_flow_aborts_progress( hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2100,7 +2100,7 @@ async def test_unique_id_ignore( """Test that we can ignore flows that are in progress and have a unique ID.""" 
async_setup_entry = AsyncMock(return_value=False) mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2157,7 +2157,7 @@ async def test_manual_add_overrides_ignored_entry( hass, MockModule("comp"), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2204,7 +2204,7 @@ async def test_manual_add_overrides_ignored_entry_singleton( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2245,7 +2245,7 @@ async def test__async_current_entries_does_not_skip_ignore_non_user( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2282,7 +2282,7 @@ async def test__async_current_entries_explicit_skip_ignore( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2323,7 +2323,7 @@ async def test__async_current_entries_explicit_include_ignore( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) 
class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2351,7 +2351,7 @@ async def test_unignore_step_form( """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" async_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2398,7 +2398,7 @@ async def test_unignore_create_entry( """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them.""" async_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2448,7 +2448,7 @@ async def test_unignore_default_impl( """Test that resdicovery is a no-op by default.""" async_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2482,7 +2482,7 @@ async def test_partial_flows_hidden( """Test that flows that don't have a cur_step and haven't finished initing are hidden.""" async_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) # A flag to test our assertion that `async_step_discovery` was called and is in its blocked state # This simulates if the step was e.g. 
doing network i/o @@ -2562,7 +2562,7 @@ async def test_async_setup_init_entry( "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2616,7 +2616,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2682,7 +2682,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: async_setup_entry=mock_async_setup_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2734,7 +2734,7 @@ async def test_flow_with_default_discovery( hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2781,7 +2781,7 @@ async def test_flow_with_default_discovery_with_unique_id( ) -> None: """Test discovery flow using the default discovery is ignored when unique ID is set.""" mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2818,7 +2818,7 @@ async def test_default_discovery_abort_existing_entries( entry.add_to_hass(hass) mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2838,7 +2838,7 @@ async def 
test_default_discovery_in_progress( ) -> None: """Test that a flow using default discovery can only be triggered once.""" mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2878,7 +2878,7 @@ async def test_default_discovery_abort_on_new_unique_flow( ) -> None: """Test that a flow using default discovery is aborted when a second flow with unique ID is created.""" mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2920,7 +2920,7 @@ async def test_default_discovery_abort_on_user_flow_complete( ) -> None: """Test that a flow using default discovery is aborted when a second flow completes.""" mock_integration(hass, MockModule("comp")) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -2977,7 +2977,7 @@ async def test_flow_same_device_multiple_sources( hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -3088,7 +3088,7 @@ async def test_entry_reload_calls_on_unload_listeners( async_unload_entry=async_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) mock_unload_callback = Mock() @@ -3119,7 +3119,7 @@ async def test_setup_raise_entry_error( side_effect=ConfigEntryError("Incompatible firmware version") ) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await 
entry.async_setup(hass) await hass.async_block_till_done() @@ -3156,7 +3156,7 @@ async def test_setup_raise_entry_error_from_first_coordinator_update( return True mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3193,7 +3193,7 @@ async def test_setup_not_raise_entry_error_from_future_coordinator_update( return True mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3215,7 +3215,7 @@ async def test_setup_raise_auth_failed( side_effect=ConfigEntryAuthFailed("The password is no longer valid") ) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3267,7 +3267,7 @@ async def test_setup_raise_auth_failed_from_first_coordinator_update( return True mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3316,7 +3316,7 @@ async def test_setup_raise_auth_failed_from_future_coordinator_update( return True mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3361,7 +3361,7 @@ async def test_setup_retrying_during_shutdown(hass: HomeAssistant) -> None: mock_setup_entry = 
AsyncMock(side_effect=ConfigEntryNotReady) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch("homeassistant.helpers.event.async_call_later") as mock_call: await entry.async_setup(hass) @@ -3444,7 +3444,7 @@ async def test__async_abort_entries_match( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -3530,7 +3530,7 @@ async def test__async_abort_entries_match_options_flow( mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("test_abort", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test_abort", None) + mock_platform(hass, "test_abort.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -3649,7 +3649,7 @@ async def test_entry_reload_concurrency( async_unload_entry=_async_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) tasks = [] for _ in range(15): tasks.append(asyncio.create_task(manager.async_reload(entry.entry_id))) @@ -3689,7 +3689,7 @@ async def test_unique_id_update_while_setup_in_progress( async_unload_entry=mock_unload_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) updates = {"host": "1.1.1.1"} hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) @@ -3752,7 +3752,7 @@ async def test_reauth(hass: HomeAssistant) -> None: mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, 
"test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3812,7 +3812,7 @@ async def test_get_active_flows(hass: HomeAssistant) -> None: entry = MockConfigEntry(title="test_title", domain="test") mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) await entry.async_setup(hass) await hass.async_block_till_done() @@ -3845,7 +3845,7 @@ async def test_async_wait_component_dynamic(hass: HomeAssistant) -> None: mock_setup_entry = AsyncMock(return_value=True) mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) entry.add_to_hass(hass) @@ -3876,7 +3876,7 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None: hass, MockModule("test", async_setup=mock_setup, async_setup_entry=mock_setup_entry), ) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) entry.add_to_hass(hass) @@ -3938,7 +3938,7 @@ async def test_initializing_flows_canceled_on_shutdown( await asyncio.sleep(1) mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) with patch.dict( config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} @@ -4010,7 +4010,7 @@ async def test_preview_supported( preview_calls.append(None) mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, "test.config_flow", None) assert len(preview_calls) == 0 @@ -4046,7 +4046,7 @@ async def test_preview_not_supported( raise NotImplementedError mock_integration(hass, MockModule("test")) - mock_entity_platform(hass, "config_flow.test", None) + mock_platform(hass, 
"test.config_flow", None) with patch.dict( config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 4fa10b92706..fd01beed9ab 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -31,9 +31,7 @@ async def test_requirement_installed_in_venv(hass: HomeAssistant) -> None: "homeassistant.util.package.is_virtual_env", return_value=True ), patch("homeassistant.util.package.is_docker_env", return_value=False), patch( "homeassistant.util.package.install_package", return_value=True - ) as mock_install, patch.dict( - os.environ, env_without_wheel_links(), clear=True - ): + ) as mock_install, patch.dict(os.environ, env_without_wheel_links(), clear=True): hass.config.skip_pip = False mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"])) assert await setup.async_setup_component(hass, "comp", {}) @@ -51,9 +49,7 @@ async def test_requirement_installed_in_deps(hass: HomeAssistant) -> None: "homeassistant.util.package.is_virtual_env", return_value=False ), patch("homeassistant.util.package.is_docker_env", return_value=False), patch( "homeassistant.util.package.install_package", return_value=True - ) as mock_install, patch.dict( - os.environ, env_without_wheel_links(), clear=True - ): + ) as mock_install, patch.dict(os.environ, env_without_wheel_links(), clear=True): hass.config.skip_pip = False mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"])) assert await setup.async_setup_component(hass, "comp", {}) @@ -369,7 +365,7 @@ async def test_install_with_wheels_index(hass: HomeAssistant) -> None: ), patch("homeassistant.util.package.install_package") as mock_inst, patch.dict( os.environ, {"WHEELS_LINKS": "https://wheels.hass.io/test"} ), patch( - "os.path.dirname" + "os.path.dirname", ) as mock_dir: mock_dir.return_value = "ha_package_path" assert await setup.async_setup_component(hass, "comp", {}) @@ -391,9 +387,7 @@ async def 
test_install_on_docker(hass: HomeAssistant) -> None: "homeassistant.util.package.is_docker_env", return_value=True ), patch("homeassistant.util.package.install_package") as mock_inst, patch( "os.path.dirname" - ) as mock_dir, patch.dict( - os.environ, env_without_wheel_links(), clear=True - ): + ) as mock_dir, patch.dict(os.environ, env_without_wheel_links(), clear=True): mock_dir.return_value = "ha_package_path" assert await setup.async_setup_component(hass, "comp", {}) assert "comp" in hass.config.components diff --git a/tests/test_runner.py b/tests/test_runner.py index 5fe5c2881ff..3b06e3b64dc 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -75,7 +75,7 @@ def test_run_executor_shutdown_throws( "homeassistant.runner.InterruptibleThreadPoolExecutor.shutdown", side_effect=RuntimeError, ) as mock_shutdown, patch( - "homeassistant.core.HomeAssistant.async_run" + "homeassistant.core.HomeAssistant.async_run", ) as mock_run: runner.run(default_config) diff --git a/tests/test_setup.py b/tests/test_setup.py index eb4c645ecb1..00bb3fa2a2d 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -23,8 +23,8 @@ from .common import ( MockModule, MockPlatform, assert_setup_component, - mock_entity_platform, mock_integration, + mock_platform, ) @@ -90,9 +90,9 @@ async def test_validate_platform_config( hass, MockModule("platform_conf", platform_schema_base=platform_schema_base), ) - mock_entity_platform( + mock_platform( hass, - "platform_conf.whatever", + "whatever.platform_conf", MockPlatform(platform_schema=platform_schema), ) @@ -156,9 +156,9 @@ async def test_validate_platform_config_2( ), ) - mock_entity_platform( + mock_platform( hass, - "platform_conf.whatever", + "whatever.platform_conf", MockPlatform("whatever", platform_schema=platform_schema), ) @@ -185,9 +185,9 @@ async def test_validate_platform_config_3( hass, MockModule("platform_conf", platform_schema=component_schema) ) - mock_entity_platform( + mock_platform( hass, - 
"platform_conf.whatever", + "whatever.platform_conf", MockPlatform("whatever", platform_schema=platform_schema), ) @@ -213,9 +213,9 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: MockModule("platform_conf", platform_schema_base=component_schema), ) - mock_entity_platform( + mock_platform( hass, - "platform_conf.whatever", + "whatever.platform_conf", MockPlatform(platform_schema=platform_schema), ) @@ -350,7 +350,7 @@ async def test_component_setup_with_validation_and_dependency( MockModule("platform_a", setup=config_check_setup, dependencies=["comp_a"]), ) - mock_entity_platform(hass, "switch.platform_a", platform) + mock_platform(hass, "platform_a.switch", platform) await setup.async_setup_component( hass, @@ -367,13 +367,15 @@ async def test_platform_specific_config_validation(hass: HomeAssistant) -> None: mock_setup = Mock(spec_set=True) - mock_entity_platform( + mock_platform( hass, - "switch.platform_a", + "platform_a.switch", MockPlatform(platform_schema=platform_schema, setup_platform=mock_setup), ) - with assert_setup_component(0, "switch"): + with assert_setup_component(0, "switch"), patch( + "homeassistant.setup.async_notify_setup_error" + ) as mock_notify: assert await setup.async_setup_component( hass, "switch", @@ -381,11 +383,14 @@ async def test_platform_specific_config_validation(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert mock_setup.call_count == 0 + assert len(mock_notify.mock_calls) == 1 hass.data.pop(setup.DATA_SETUP) hass.config.components.remove("switch") - with assert_setup_component(0): + with assert_setup_component(0), patch( + "homeassistant.setup.async_notify_setup_error" + ) as mock_notify: assert await setup.async_setup_component( hass, "switch", @@ -399,11 +404,14 @@ async def test_platform_specific_config_validation(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert mock_setup.call_count == 0 + assert len(mock_notify.mock_calls) == 1 
hass.data.pop(setup.DATA_SETUP) hass.config.components.remove("switch") - with assert_setup_component(1, "switch"): + with assert_setup_component(1, "switch"), patch( + "homeassistant.setup.async_notify_setup_error" + ) as mock_notify: assert await setup.async_setup_component( hass, "switch", @@ -411,6 +419,7 @@ async def test_platform_specific_config_validation(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert mock_setup.call_count == 1 + assert len(mock_notify.mock_calls) == 0 async def test_disable_component_if_invalid_return(hass: HomeAssistant) -> None: @@ -523,7 +532,7 @@ async def test_platform_error_slow_setup( result = await setup.async_setup_component(hass, "test_component1", {}) assert len(called) == 1 assert not result - assert "test_component1 is taking longer than 0.1 seconds" in caplog.text + assert "'test_component1' is taking longer than 0.1 seconds" in caplog.text async def test_when_setup_already_loaded(hass: HomeAssistant) -> None: @@ -618,7 +627,7 @@ async def test_parallel_entry_setup(hass: HomeAssistant, mock_handlers) -> None: async_setup_entry=mock_async_setup_entry, ), ) - mock_entity_platform(hass, "config_flow.comp", None) + mock_platform(hass, "comp.config_flow", None) await setup.async_setup_component(hass, "comp", {}) assert calls == [1, 2, 1, 2] @@ -653,7 +662,7 @@ async def test_integration_logs_is_custom( ): result = await setup.async_setup_component(hass, "test_component1", {}) assert not result - assert "Setup failed for custom integration test_component1: Boom" in caplog.text + assert "Setup failed for custom integration 'test_component1': Boom" in caplog.text async def test_async_get_loaded_integrations(hass: HomeAssistant) -> None: @@ -735,7 +744,7 @@ async def test_setup_config_entry_from_yaml( ) -> None: """Test attempting to setup an integration which only supports config_entries.""" expected_warning = ( - "The test_integration_only_entry integration does not support YAML setup, " + "The 
'test_integration_only_entry' integration does not support YAML setup, " "please remove it from your configuration" ) diff --git a/tests/test_util/__init__.py b/tests/test_util/__init__.py index b8499675ea2..fe2c2c640e5 100644 --- a/tests/test_util/__init__.py +++ b/tests/test_util/__init__.py @@ -1 +1,35 @@ -"""Tests for the test utilities.""" +"""Test utilities.""" +from collections.abc import Awaitable, Callable + +from aiohttp.web import Application, Request, StreamResponse, middleware + + +def mock_real_ip(app: Application) -> Callable[[str], None]: + """Inject middleware to mock real IP. + + Returns a function to set the real IP. + """ + ip_to_mock: str | None = None + + def set_ip_to_mock(value: str): + nonlocal ip_to_mock + ip_to_mock = value + + @middleware + async def mock_real_ip( + request: Request, handler: Callable[[Request], Awaitable[StreamResponse]] + ) -> StreamResponse: + """Mock Real IP middleware.""" + nonlocal ip_to_mock + + request = request.clone(remote=ip_to_mock) + + return await handler(request) + + async def real_ip_startup(app): + """Startup of real ip.""" + app.middlewares.insert(0, mock_real_ip) + + app.on_startup.append(real_ip_startup) + + return set_ip_to_mock diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index ac874fcc45c..4f2518253ff 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -280,6 +280,12 @@ class AiohttpClientMockResponse: def close(self): """Mock close.""" + async def wait_for_close(self): + """Wait until all requests are done. + + Do nothing as we are mocking. + """ + @property def response(self): """Property method to expose the response to other read methods.""" diff --git a/tests/testing_config/custom_components/test/fan.py b/tests/testing_config/custom_components/test/fan.py new file mode 100644 index 00000000000..133f372f4fa --- /dev/null +++ b/tests/testing_config/custom_components/test/fan.py @@ -0,0 +1,64 @@ +"""Provide a mock fan platform. 
+ +Call init before using it in your tests to ensure clean test data. +""" +from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + +from tests.common import MockEntity + +ENTITIES = {} + + +def init(empty=False): + """Initialize the platform with entities.""" + global ENTITIES + + ENTITIES = ( + {} + if empty + else { + "support_preset_mode": MockFan( + name="Support fan with preset_mode support", + supported_features=FanEntityFeature.PRESET_MODE, + unique_id="unique_support_preset_mode", + preset_modes=["auto", "eco"], + ) + } + ) + + +async def async_setup_platform( + hass: HomeAssistant, + config: ConfigType, + async_add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +): + """Return mock entities.""" + async_add_entities_callback(list(ENTITIES.values())) + + +class MockFan(MockEntity, FanEntity): + """Mock Fan class.""" + + @property + def preset_mode(self) -> str | None: + """Return preset mode.""" + return self._handle("preset_mode") + + @property + def preset_modes(self) -> list[str] | None: + """Return preset mode.""" + return self._handle("preset_modes") + + @property + def supported_features(self): + """Return the class of this fan.""" + return self._handle("supported_features") + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set preset mode.""" + self._attr_preset_mode = preset_mode + await self.async_update_ha_state() diff --git a/tests/util/test_aiohttp.py b/tests/util/test_aiohttp.py index bfdc3c3e949..ada0269ac0e 100644 --- a/tests/util/test_aiohttp.py +++ b/tests/util/test_aiohttp.py @@ -1,5 +1,4 @@ """Test aiohttp request helper.""" -import sys from aiohttp import web @@ -50,22 +49,11 @@ def test_serialize_text() -> None: def test_serialize_body_str() -> None: """Test serializing 
a response with a str as body.""" response = web.Response(status=201, body="Hello") - # TODO: Remove version check with aiohttp 3.9.0 - if sys.version_info >= (3, 12): - assert aiohttp.serialize_response(response) == { - "status": 201, - "body": "Hello", - "headers": {"Content-Type": "text/plain; charset=utf-8"}, - } - else: - assert aiohttp.serialize_response(response) == { - "status": 201, - "body": "Hello", - "headers": { - "Content-Length": "5", - "Content-Type": "text/plain; charset=utf-8", - }, - } + assert aiohttp.serialize_response(response) == { + "status": 201, + "body": "Hello", + "headers": {"Content-Type": "text/plain; charset=utf-8"}, + } def test_serialize_body_None() -> None: diff --git a/tests/util/test_color.py b/tests/util/test_color.py index 7c5e959aabc..a7e6ba9ab46 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -270,6 +270,15 @@ def test_color_rgbw_to_rgb() -> None: assert color_util.color_rgbw_to_rgb(0, 0, 0, 127) == (127, 127, 127) +def test_color_xy_to_temperature() -> None: + """Test color_xy_to_temperature.""" + assert color_util.color_xy_to_temperature(0.5119, 0.4147) == 2136 + assert color_util.color_xy_to_temperature(0.368, 0.3686) == 4302 + assert color_util.color_xy_to_temperature(0.4448, 0.4066) == 2893 + assert color_util.color_xy_to_temperature(0.1, 0.8) == 8645 + assert color_util.color_xy_to_temperature(0.5, 0.4) == 2140 + + def test_color_rgb_to_hex() -> None: """Test color_rgb_to_hex.""" assert color_util.color_rgb_to_hex(255, 255, 255) == "ffffff" diff --git a/tests/util/test_executor.py b/tests/util/test_executor.py index 076864c65c4..d7731a44b7d 100644 --- a/tests/util/test_executor.py +++ b/tests/util/test_executor.py @@ -88,6 +88,10 @@ async def test_overall_timeout_reached(caplog: pytest.LogCaptureFixture) -> None iexecutor.shutdown() finish = time.monotonic() - assert finish - start < 1.3 + # Idealy execution time (finish - start) should be < 1.2 sec. 
+ # CI tests might not run in an ideal environment and timing might + # not be accurate, so we let this test pass + # if the duration is below 3 seconds. + assert finish - start < 3.0 iexecutor.shutdown() diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 990956ec908..3a2d9b3734d 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -1,13 +1,15 @@ """Test Home Assistant yaml loader.""" +from collections.abc import Generator import importlib import io import os import pathlib from typing import Any import unittest -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest +import voluptuous as vol import yaml as pyyaml from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file @@ -584,6 +586,61 @@ async def test_loading_actual_file_with_syntax_error( await hass.async_add_executor_job(load_yaml_config_file, fixture_path) +@pytest.fixture +def mock_integration_frame() -> Generator[Mock, None, None]: + """Mock as if we're calling code from inside an integration.""" + correct_frame = Mock( + filename="/home/paulus/.homeassistant/custom_components/hue/light.py", + lineno="23", + line="self.light.is_on", + ) + with patch( + "homeassistant.helpers.frame.extract_stack", + return_value=[ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + correct_frame, + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ], + ): + yield correct_frame + + +@pytest.mark.parametrize( + ("loader_class", "new_class"), + [ + (yaml.loader.SafeLoader, "FastSafeLoader"), + ( + yaml.loader.SafeLineLoader, + "PythonSafeLoader", + ), + ], +) +async def test_deprecated_loaders( + hass: HomeAssistant, + mock_integration_frame: Mock, + caplog: pytest.LogCaptureFixture, + loader_class, + new_class: str, +) -> None: + """Test instantiating the deprecated yaml loaders logs a warning.""" + with pytest.raises(TypeError), 
patch( + "homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set() + ): + loader_class() + assert ( + f"{loader_class.__name__} was called from hue, this is a deprecated class. " + f"Use {new_class} instead" + ) in caplog.text + + def test_string_annotated(try_both_loaders) -> None: """Test strings are annotated with file + line.""" conf = ( @@ -615,3 +672,20 @@ def test_string_annotated(try_both_loaders) -> None: getattr(value, "__config_file__", None) == expected_annotations[key][1][0] ) assert getattr(value, "__line__", None) == expected_annotations[key][1][1] + + +def test_string_used_as_vol_schema(try_both_loaders) -> None: + """Test the subclassed strings can be used in voluptuous schemas.""" + conf = "wanted_data:\n key_1: value_1\n key_2: value_2\n" + with io.StringIO(conf) as file: + doc = yaml_loader.parse_yaml(file) + + # Test using the subclassed strings in a schema + schema = vol.Schema( + {vol.Required(key): value for key, value in doc["wanted_data"].items()}, + ) + # Test using the subclassed strings when validating a schema + schema(doc["wanted_data"]) + schema({"key_1": "value_1", "key_2": "value_2"}) + with pytest.raises(vol.Invalid): + schema({"key_1": "value_2", "key_2": "value_1"})