diff --git a/.coveragerc b/.coveragerc index ceff3384202..1ccb9e461df 100644 --- a/.coveragerc +++ b/.coveragerc @@ -361,6 +361,8 @@ omit = homeassistant/components/environment_canada/weather.py homeassistant/components/envisalink/* homeassistant/components/ephember/climate.py + homeassistant/components/epic_games_store/__init__.py + homeassistant/components/epic_games_store/coordinator.py homeassistant/components/epion/__init__.py homeassistant/components/epion/coordinator.py homeassistant/components/epion/sensor.py @@ -739,6 +741,7 @@ omit = homeassistant/components/lutron/binary_sensor.py homeassistant/components/lutron/cover.py homeassistant/components/lutron/entity.py + homeassistant/components/lutron/event.py homeassistant/components/lutron/fan.py homeassistant/components/lutron/light.py homeassistant/components/lutron/switch.py @@ -983,6 +986,7 @@ omit = homeassistant/components/orvibo/switch.py homeassistant/components/osoenergy/__init__.py homeassistant/components/osoenergy/const.py + homeassistant/components/osoenergy/sensor.py homeassistant/components/osoenergy/water_heater.py homeassistant/components/osramlightify/light.py homeassistant/components/otp/sensor.py @@ -1154,8 +1158,10 @@ omit = homeassistant/components/roborock/coordinator.py homeassistant/components/rocketchat/notify.py homeassistant/components/romy/__init__.py + homeassistant/components/romy/binary_sensor.py homeassistant/components/romy/coordinator.py homeassistant/components/romy/entity.py + homeassistant/components/romy/sensor.py homeassistant/components/romy/vacuum.py homeassistant/components/roomba/__init__.py homeassistant/components/roomba/binary_sensor.py @@ -1405,11 +1411,6 @@ omit = homeassistant/components/tado/water_heater.py homeassistant/components/tami4/button.py homeassistant/components/tank_utility/sensor.py - homeassistant/components/tankerkoenig/__init__.py - homeassistant/components/tankerkoenig/binary_sensor.py - homeassistant/components/tankerkoenig/coordinator.py - 
homeassistant/components/tankerkoenig/entity.py - homeassistant/components/tankerkoenig/sensor.py homeassistant/components/tapsaff/binary_sensor.py homeassistant/components/tautulli/__init__.py homeassistant/components/tautulli/coordinator.py diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index f02a8bacce8..a72c4e75cfe 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -27,7 +27,7 @@ jobs: publish: ${{ steps.version.outputs.publish }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 with: fetch-depth: 0 @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: translations path: translations.tar.gz @@ -90,7 +90,7 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: translations @@ -242,7 +242,7 @@ jobs: - green steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set build additional args run: | @@ -279,7 +279,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Initialize git uses: home-assistant/actions/helpers/git-init@master @@ -320,7 +320,7 @@ jobs: registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - 
name: Install Cosign uses: sigstore/cosign-installer@v3.4.0 @@ -450,7 +450,7 @@ jobs: if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 @@ -458,7 +458,7 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: translations diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a5bafa0c52d..580aba9752c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,10 +33,10 @@ on: type: boolean env: - CACHE_VERSION: 7 + CACHE_VERSION: 8 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 8 - HA_SHORT_VERSION: "2024.5" + HA_SHORT_VERSION: "2024.6" DEFAULT_PYTHON: "3.12" ALL_PYTHON_VERSIONS: "['3.12']" # 10.3 is the oldest supported version @@ -89,7 +89,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Generate partial Python venv restore key id: generate_python_cache_key run: >- @@ -97,7 +97,8 @@ jobs: hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_all.txt') }}-${{ - hashFiles('homeassistant/package_constraints.txt') }}" >> $GITHUB_OUTPUT + hashFiles('homeassistant/package_constraints.txt') }}-${{ + hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT - name: Generate partial pre-commit restore key id: generate_pre-commit_cache_key run: >- @@ -223,7 +224,7 @@ jobs: - info steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -269,7 +270,7 @@ jobs: - pre-commit steps: - 
name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -309,7 +310,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -348,7 +349,7 @@ jobs: - pre-commit steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 id: python @@ -442,7 +443,7 @@ jobs: python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -451,8 +452,10 @@ jobs: check-latest: true - name: Generate partial uv restore key id: generate-uv-key - run: >- - echo "key=uv-${{ env.UV_CACHE_VERSION }}-${{ + run: | + uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3) + echo "version=${uv_version}" >> $GITHUB_OUTPUT + echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{ env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv @@ -472,10 +475,13 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ steps.generate-uv-key.outputs.key }} restore-keys: | - ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ env.UV_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}- + ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ + env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{ + env.HA_SHORT_VERSION }}- - name: Install additional OS dependencies if: steps.cache-venv.outputs.cache-hit != 'true' run: | + sudo rm 
/etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ @@ -497,8 +503,9 @@ jobs: python --version pip install "$(grep '^uv' < requirements_test.txt)" uv pip install -U "pip>=21.3.1" setuptools wheel - uv pip install -r requirements_all.txt - uv pip install "$(grep 'python-gammu' < requirements_all.txt | sed -e 's|# python-gammu|python-gammu|g')" + uv pip install -r requirements.txt + python -m script.gen_requirements_all ci + uv pip install -r requirements_all_pytest.txt uv pip install -r requirements_test.txt uv pip install -e . --config-settings editable_mode=compat @@ -513,7 +520,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -545,7 +552,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -578,7 +585,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -622,7 +629,7 @@ jobs: - base steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -688,13 +695,14 @@ jobs: steps: - name: Install additional OS dependencies run: | + sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.1.0 @@ -715,7 +723,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: pytest_buckets path: pytest_buckets.txt @@ -748,13 +756,14 @@ jobs: steps: - name: Install additional OS dependencies run: | + sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -776,7 +785,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: pytest_buckets - name: Compile English translations @@ -811,14 +820,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -863,13 +872,14 @@ jobs: steps: - name: Install additional OS dependencies run: | + sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ libmariadb-dev-compat - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: 
actions/setup-python@v5.1.0 @@ -933,7 +943,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -941,7 +951,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -985,13 +995,14 @@ jobs: steps: - name: Install additional OS dependencies run: | + sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ postgresql-server-dev-14 - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1056,7 +1067,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1064,7 +1075,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1086,9 +1097,9 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Download all 
coverage artifacts - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: pattern: coverage-* - name: Upload coverage to Codecov @@ -1126,13 +1137,14 @@ jobs: steps: - name: Install additional OS dependencies run: | + sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo apt-get update sudo apt-get -y install \ bluez \ ffmpeg \ libgammu-dev - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.1.0 @@ -1193,14 +1205,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1219,9 +1231,9 @@ jobs: timeout-minutes: 10 steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: pattern: coverage-* - name: Upload coverage to Codecov diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 2b9a2af127f..4f624c582d7 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,14 +21,14 @@ jobs: steps: - name: Check out code from GitHub - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.1 + uses: github/codeql-action/init@v3.25.3 with: 
languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.1 + uses: github/codeql-action/analyze@v3.25.3 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index e61eef36f0b..3cf5a7ed089 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} uses: actions/setup-python@v5.1.0 diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 7102df0ae4d..4f652b7a0a1 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -14,6 +14,10 @@ on: - "homeassistant/package_constraints.txt" - "requirements_all.txt" - "requirements.txt" + - "script/gen_requirements_all.py" + +env: + DEFAULT_PYTHON: "3.12" concurrency: group: ${{ github.workflow }}-${{ github.ref_name}} @@ -28,7 +32,22 @@ jobs: architectures: ${{ steps.info.outputs.architectures }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 + + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + id: python + uses: actions/setup-python@v5.1.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + check-latest: true + + - name: Create Python virtual environment + run: | + python -m venv venv + . 
venv/bin/activate + python --version + pip install "$(grep '^uv' < requirements_test.txt)" + uv pip install -r requirements.txt - name: Get information id: info @@ -63,19 +82,30 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.1 + uses: actions/upload-artifact@v4.3.3 with: name: requirements_diff path: ./requirements_diff.txt overwrite: true + - name: Generate requirements + run: | + . venv/bin/activate + python -m script.gen_requirements_all ci + + - name: Upload requirements_all_wheels + uses: actions/upload-artifact@v4.3.3 + with: + name: requirements_all_wheels + path: ./requirements_all_wheels_*.txt + core: name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2) if: github.repository_owner == 'home-assistant' @@ -88,15 +118,15 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Download env_file - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: requirements_diff @@ -126,42 +156,22 @@ jobs: arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository - uses: actions/checkout@v4.1.2 + uses: actions/checkout@v4.1.4 - name: Download env_file - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: env_file - name: Download requirements_diff - uses: actions/download-artifact@v4.1.4 + uses: actions/download-artifact@v4.1.7 with: name: requirements_diff - - name: (Un)comment packages - run: | - requirement_files="requirements_all.txt requirements_diff.txt" - for 
requirement_file in ${requirement_files}; do - sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file} - sed -i "s|# evdev|evdev|g" ${requirement_file} - sed -i "s|# pycups|pycups|g" ${requirement_file} - sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file} - sed -i "s|# python-gammu|python-gammu|g" ${requirement_file} - - # Some packages are not buildable on armhf anymore - if [ "${{ matrix.arch }}" = "armhf" ]; then - - # Pandas has issues building on armhf, it is expected they - # will drop the platform in the near future (they consider it - # "flimsy" on 386). The following packages depend on pandas, - # so we comment them out. - sed -i "s|env-canada|# env-canada|g" ${requirement_file} - sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file} - sed -i "s|pyezviz|# pyezviz|g" ${requirement_file} - sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file} - fi - - done + - name: Download requirements_all_wheels + uses: actions/download-artifact@v4.1.7 + with: + name: requirements_all_wheels - name: Split requirements all run: | @@ -169,7 +179,7 @@ jobs: # This is to prevent the build from running out of memory when # resolving packages on 32-bits systems (like armhf, armv7). 
- split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt + split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Create requirements for cython<3 run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd42fecbfa1..40757c09e95 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.7 + rev: v0.4.2 hooks: - id: ruff args: diff --git a/.strict-typing b/.strict-typing index 5985938885f..584ccc5ee0a 100644 --- a/.strict-typing +++ b/.strict-typing @@ -235,6 +235,7 @@ homeassistant.components.homeworks.* homeassistant.components.http.* homeassistant.components.huawei_lte.* homeassistant.components.humidifier.* +homeassistant.components.husqvarna_automower.* homeassistant.components.hydrawise.* homeassistant.components.hyperion.* homeassistant.components.ibeacon.* diff --git a/CODEOWNERS b/CODEOWNERS index 3b617f97453..023c0eaa89e 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -127,8 +127,8 @@ build.json @home-assistant/supervisor /tests/components/aprilaire/ @chamberlain2007 /homeassistant/components/aprs/ @PhilRW /tests/components/aprs/ @PhilRW -/homeassistant/components/aranet/ @aschmitz @thecode -/tests/components/aranet/ @aschmitz @thecode +/homeassistant/components/aranet/ @aschmitz @thecode @anrijs +/tests/components/aranet/ @aschmitz @thecode @anrijs /homeassistant/components/arcam_fmj/ @elupus /tests/components/arcam_fmj/ @elupus /homeassistant/components/arris_tg2492lg/ @vanbalken @@ -398,6 +398,8 @@ build.json @home-assistant/supervisor /homeassistant/components/environment_canada/ @gwww @michaeldavie /tests/components/environment_canada/ @gwww @michaeldavie /homeassistant/components/ephember/ @ttroy50 +/homeassistant/components/epic_games_store/ @hacf-fr @Quentame +/tests/components/epic_games_store/ 
@hacf-fr @Quentame /homeassistant/components/epion/ @lhgravendeel /tests/components/epion/ @lhgravendeel /homeassistant/components/epson/ @pszafer @@ -599,6 +601,8 @@ build.json @home-assistant/supervisor /tests/components/homekit_controller/ @Jc2k @bdraco /homeassistant/components/homematic/ @pvizeli /tests/components/homematic/ @pvizeli +/homeassistant/components/homematicip_cloud/ @hahn-th +/tests/components/homematicip_cloud/ @hahn-th /homeassistant/components/homewizard/ @DCSBL /tests/components/homewizard/ @DCSBL /homeassistant/components/honeywell/ @rdfurman @mkmer @@ -873,8 +877,8 @@ build.json @home-assistant/supervisor /tests/components/motioneye/ @dermotduffy /homeassistant/components/motionmount/ @RJPoelstra /tests/components/motionmount/ @RJPoelstra -/homeassistant/components/mqtt/ @emontnemery @jbouwh -/tests/components/mqtt/ @emontnemery @jbouwh +/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco +/tests/components/mqtt/ @emontnemery @jbouwh @bdraco /homeassistant/components/msteams/ @peroyvind /homeassistant/components/mullvad/ @meichthys /tests/components/mullvad/ @meichthys @@ -1284,8 +1288,8 @@ build.json @home-assistant/supervisor /tests/components/snmp/ @nmaggioni /homeassistant/components/snooz/ @AustinBrunkhorst /tests/components/snooz/ @AustinBrunkhorst -/homeassistant/components/solaredge/ @frenck -/tests/components/solaredge/ @frenck +/homeassistant/components/solaredge/ @frenck @bdraco +/tests/components/solaredge/ @frenck @bdraco /homeassistant/components/solaredge_local/ @drobtravels @scheric /homeassistant/components/solarlog/ @Ernst79 /tests/components/solarlog/ @Ernst79 @@ -1582,8 +1586,8 @@ build.json @home-assistant/supervisor /tests/components/wiz/ @sbidy /homeassistant/components/wled/ @frenck /tests/components/wled/ @frenck -/homeassistant/components/wolflink/ @adamkrol93 -/tests/components/wolflink/ @adamkrol93 +/homeassistant/components/wolflink/ @adamkrol93 @mtielen +/tests/components/wolflink/ @adamkrol93 @mtielen 
/homeassistant/components/workday/ @fabaff @gjohansson-ST /tests/components/workday/ @fabaff @gjohansson-ST /homeassistant/components/worldclock/ @fabaff diff --git a/Dockerfile b/Dockerfile index 28b65d6383d..c916a3d2f3c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.1.27 +RUN pip3 install uv==0.1.35 WORKDIR /usr/src diff --git a/Dockerfile.dev b/Dockerfile.dev index e60456f7b1f..507cc9a7bb2 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -22,6 +22,7 @@ RUN \ libavcodec-dev \ libavdevice-dev \ libavutil-dev \ + libgammu-dev \ libswscale-dev \ libswresample-dev \ libavfilter-dev \ diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index afb364e6d2f..cbc808eb0fa 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -253,6 +253,9 @@ async def async_setup_hass( runtime_config.log_no_color, ) + if runtime_config.debug or hass.loop.get_debug(): + hass.config.debug = True + hass.config.safe_mode = runtime_config.safe_mode hass.config.skip_pip = runtime_config.skip_pip hass.config.skip_pip_packages = runtime_config.skip_pip_packages @@ -316,6 +319,7 @@ async def async_setup_hass( hass = core.HomeAssistant(old_config.config_dir) if old_logging: hass.data[DATA_LOGGING] = old_logging + hass.config.debug = old_config.debug hass.config.skip_pip = old_config.skip_pip hass.config.skip_pip_packages = old_config.skip_pip_packages hass.config.internal_url = old_config.internal_url diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index fa651d98efd..24a8180eef8 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -8,6 +8,6 @@ "iot_class": "cloud_polling", "loggers": ["accuweather"], "quality_scale": "platinum", - "requirements": ["accuweather==2.1.1"], + "requirements": ["accuweather==3.0.0"], "single_config_entry": true } diff --git 
a/homeassistant/components/accuweather/system_health.py b/homeassistant/components/accuweather/system_health.py index 607a557f333..f47828cb5a3 100644 --- a/homeassistant/components/accuweather/system_health.py +++ b/homeassistant/components/accuweather/system_health.py @@ -24,7 +24,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]: """Get info for the info page.""" remaining_requests = list(hass.data[DOMAIN].values())[ 0 - ].accuweather.requests_remaining + ].coordinator_observation.accuweather.requests_remaining return { "can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT), diff --git a/homeassistant/components/airthings/sensor.py b/homeassistant/components/airthings/sensor.py index fc91d816aca..f0a3dc5be8f 100644 --- a/homeassistant/components/airthings/sensor.py +++ b/homeassistant/components/airthings/sensor.py @@ -157,3 +157,11 @@ class AirthingsHeaterEnergySensor( def native_value(self) -> StateType: """Return the value reported by the sensor.""" return self.coordinator.data[self._id].sensors[self.entity_description.key] # type: ignore[no-any-return] + + @property + def available(self) -> bool: + """Check if device and sensor is available in data.""" + return ( + super().available + and self.entity_description.key in self.coordinator.data[self._id].sensors + ) diff --git a/homeassistant/components/alexa/intent.py b/homeassistant/components/alexa/intent.py index fdf72ccce28..217d5dccc25 100644 --- a/homeassistant/components/alexa/intent.py +++ b/homeassistant/components/alexa/intent.py @@ -1,5 +1,6 @@ """Support for Alexa skill service end point.""" +from collections.abc import Callable, Coroutine import enum import logging from typing import Any @@ -16,7 +17,9 @@ from .const import DOMAIN, SYN_RESOLUTION_MATCH _LOGGER = logging.getLogger(__name__) -HANDLERS = Registry() # type: ignore[var-annotated] +HANDLERS: Registry[ + str, Callable[[HomeAssistant, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]]] +] = 
Registry() INTENTS_API_ENDPOINT = "/api/alexa" @@ -129,8 +132,7 @@ async def async_handle_message( if not (handler := HANDLERS.get(req_type)): raise UnknownRequest(f"Received unknown request {req_type}") - response: dict[str, Any] = await handler(hass, message) - return response + return await handler(hass, message) @HANDLERS.register("SessionEndedRequest") diff --git a/homeassistant/components/aranet/const.py b/homeassistant/components/aranet/const.py index 056c627daa8..e038a073fd5 100644 --- a/homeassistant/components/aranet/const.py +++ b/homeassistant/components/aranet/const.py @@ -1,3 +1,4 @@ """Constants for the Aranet integration.""" DOMAIN = "aranet" +ARANET_MANUFACTURER_NAME = "SAF Tehnika" diff --git a/homeassistant/components/aranet/icons.json b/homeassistant/components/aranet/icons.json new file mode 100644 index 00000000000..6d6e9a83b03 --- /dev/null +++ b/homeassistant/components/aranet/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "sensor": { + "radiation_total": { + "default": "mdi:radioactive" + }, + "radiation_rate": { + "default": "mdi:radioactive" + } + } + } +} diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json index 152c56e80f3..a1cd80cc3c7 100644 --- a/homeassistant/components/aranet/manifest.json +++ b/homeassistant/components/aranet/manifest.json @@ -13,7 +13,7 @@ "connectable": false } ], - "codeowners": ["@aschmitz", "@thecode"], + "codeowners": ["@aschmitz", "@thecode", "@anrijs"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/aranet", diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index b55fe2bc5ce..4509aa66027 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -23,6 +23,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + ATTR_MANUFACTURER, 
ATTR_NAME, ATTR_SW_VERSION, CONCENTRATION_PARTS_PER_MILLION, @@ -37,7 +38,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from .const import ARANET_MANUFACTURER_NAME, DOMAIN @dataclass(frozen=True) @@ -48,6 +49,7 @@ class AranetSensorEntityDescription(SensorEntityDescription): # Restrict the type to satisfy the type checker and catch attempts # to use UNDEFINED in the entity descriptions. name: str | None = None + scale: float | int = 1 SENSOR_DESCRIPTIONS = { @@ -79,6 +81,24 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, state_class=SensorStateClass.MEASUREMENT, ), + "radiation_rate": AranetSensorEntityDescription( + key="radiation_rate", + translation_key="radiation_rate", + name="Radiation Dose Rate", + native_unit_of_measurement="μSv/h", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + scale=0.001, + ), + "radiation_total": AranetSensorEntityDescription( + key="radiation_total", + translation_key="radiation_total", + name="Radiation Total Dose", + native_unit_of_measurement="mSv", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=4, + scale=0.000001, + ), "battery": AranetSensorEntityDescription( key="battery", name="Battery", @@ -115,6 +135,7 @@ def _sensor_device_info_to_hass( hass_device_info = DeviceInfo({}) if adv.readings and adv.readings.name: hass_device_info[ATTR_NAME] = adv.readings.name + hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME if adv.manufacturer_data: hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version) return hass_device_info @@ -132,6 +153,7 @@ def sensor_update_to_bluetooth_data_update( val = getattr(adv.readings, key) if val == -1: continue + val *= desc.scale data[tag] = val names[tag] = desc.name descs[tag] = desc diff --git 
a/homeassistant/components/aranet/strings.json b/homeassistant/components/aranet/strings.json index ac8d1907770..1cc695637d4 100644 --- a/homeassistant/components/aranet/strings.json +++ b/homeassistant/components/aranet/strings.json @@ -17,7 +17,7 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "integrations_diabled": "This device doesn't have integrations enabled. Please enable smart home integrations using the app and try again.", + "integrations_disabled": "This device doesn't have integrations enabled. Please enable smart home integrations using the app and try again.", "no_devices_found": "No unconfigured Aranet devices found.", "outdated_version": "This device is using outdated firmware. Please update it to at least v1.2.0 and try again." } diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py index 7550f860a9b..3e8cdf6fa42 100644 --- a/homeassistant/components/assist_pipeline/websocket_api.py +++ b/homeassistant/components/assist_pipeline/websocket_api.py @@ -291,8 +291,11 @@ def websocket_list_runs( msg["id"], { "pipeline_runs": [ - {"pipeline_run_id": id, "timestamp": pipeline_run.timestamp} - for id, pipeline_run in pipeline_debug.items() + { + "pipeline_run_id": pipeline_run_id, + "timestamp": pipeline_run.timestamp, + } + for pipeline_run_id, pipeline_run in pipeline_debug.items() ] }, ) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 89a2817e236..fa242ac1557 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -707,7 +707,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity): @callback def started_action() -> None: - self.hass.bus.async_fire( + # This is always a callback from a coro so there is no + # risk of this running in a thread which allows us to use + # 
async_fire_internal + self.hass.bus.async_fire_internal( EVENT_AUTOMATION_TRIGGERED, event_data, context=trigger_context ) diff --git a/homeassistant/components/automation/logbook.py b/homeassistant/components/automation/logbook.py index 7b9c8cf5809..33ed586f901 100644 --- a/homeassistant/components/automation/logbook.py +++ b/homeassistant/components/automation/logbook.py @@ -1,5 +1,8 @@ """Describe logbook events.""" +from collections.abc import Callable +from typing import Any + from homeassistant.components.logbook import ( LOGBOOK_ENTRY_CONTEXT_ID, LOGBOOK_ENTRY_ENTITY_ID, @@ -16,11 +19,16 @@ from .const import DOMAIN @callback -def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore[no-untyped-def] +def async_describe_events( + hass: HomeAssistant, + async_describe_event: Callable[ + [str, str, Callable[[LazyEventPartialState], dict[str, Any]]], None + ], +) -> None: """Describe logbook events.""" @callback - def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore[no-untyped-def] + def async_describe_logbook_event(event: LazyEventPartialState) -> dict[str, Any]: """Describe a logbook event.""" data = event.data message = "triggered" diff --git a/homeassistant/components/axis/hub/event_source.py b/homeassistant/components/axis/hub/event_source.py new file mode 100644 index 00000000000..7f2bfe7c982 --- /dev/null +++ b/homeassistant/components/axis/hub/event_source.py @@ -0,0 +1,93 @@ +"""Axis network device abstraction.""" + +from __future__ import annotations + +import axis +from axis.errors import Unauthorized +from axis.interfaces.mqtt import mqtt_json_to_event +from axis.models.mqtt import ClientState +from axis.stream_manager import Signal, State + +from homeassistant.components import mqtt +from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN +from homeassistant.components.mqtt.models import ReceiveMessage +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import 
HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.setup import async_when_setup + + +class AxisEventSource: + """Manage connection to event sources from an Axis device.""" + + def __init__( + self, hass: HomeAssistant, config_entry: ConfigEntry, api: axis.AxisDevice + ) -> None: + """Initialize the device.""" + self.hass = hass + self.config_entry = config_entry + self.api = api + + self.signal_reachable = f"axis_reachable_{config_entry.entry_id}" + + self.available = True + + @callback + def setup(self) -> None: + """Set up the device events.""" + self.api.stream.connection_status_callback.append(self._connection_status_cb) + self.api.enable_events() + self.api.stream.start() + + if self.api.vapix.mqtt.supported: + async_when_setup(self.hass, MQTT_DOMAIN, self._async_use_mqtt) + + @callback + def teardown(self) -> None: + """Tear down connections.""" + self._disconnect_from_stream() + + @callback + def _disconnect_from_stream(self) -> None: + """Stop stream.""" + if self.api.stream.state != State.STOPPED: + self.api.stream.connection_status_callback.clear() + self.api.stream.stop() + + async def _async_use_mqtt(self, hass: HomeAssistant, component: str) -> None: + """Set up to use MQTT.""" + try: + status = await self.api.vapix.mqtt.get_client_status() + except Unauthorized: + # This means the user has too low privileges + return + + if status.status.state == ClientState.ACTIVE: + self.config_entry.async_on_unload( + await mqtt.async_subscribe( + hass, f"{status.config.device_topic_prefix}/#", self._mqtt_message + ) + ) + + @callback + def _mqtt_message(self, message: ReceiveMessage) -> None: + """Receive Axis MQTT message.""" + self._disconnect_from_stream() + + if message.topic.endswith("event/connection"): + return + + event = mqtt_json_to_event(message.payload) + self.api.event.handler(event) + + @callback + def _connection_status_cb(self, status: Signal) -> None: + """Handle signals of device 
connection status. + + This is called on every RTSP keep-alive message. + Only signal state change if state change is true. + """ + + if self.available != (status == Signal.PLAYING): + self.available = not self.available + async_dispatcher_send(self.hass, self.signal_reachable) diff --git a/homeassistant/components/axis/hub/hub.py b/homeassistant/components/axis/hub/hub.py index 4abd1358417..4e58e3be7c6 100644 --- a/homeassistant/components/axis/hub/hub.py +++ b/homeassistant/components/axis/hub/hub.py @@ -5,24 +5,17 @@ from __future__ import annotations from typing import Any import axis -from axis.errors import Unauthorized -from axis.interfaces.mqtt import mqtt_json_to_event -from axis.models.mqtt import ClientState -from axis.stream_manager import Signal, State -from homeassistant.components import mqtt -from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN -from homeassistant.components.mqtt.models import ReceiveMessage from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.setup import async_when_setup from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN from .config import AxisConfig from .entity_loader import AxisEntityLoader +from .event_source import AxisEventSource class AxisHub: @@ -35,9 +28,9 @@ class AxisHub: self.hass = hass self.config = AxisConfig.from_config_entry(config_entry) self.entity_loader = AxisEntityLoader(self) + self.event_source = AxisEventSource(hass, config_entry, api) self.api = api - self.available = True self.fw_version = api.vapix.firmware_version self.product_type = api.vapix.product_type self.unique_id = format_mac(api.vapix.serial_number) @@ -51,32 +44,23 @@ class AxisHub: hub: AxisHub = 
hass.data[AXIS_DOMAIN][config_entry.entry_id] return hub + @property + def available(self) -> bool: + """Connection state to the device.""" + return self.event_source.available + # Signals @property def signal_reachable(self) -> str: """Device specific event to signal a change in connection status.""" - return f"axis_reachable_{self.config.entry.entry_id}" + return self.event_source.signal_reachable @property def signal_new_address(self) -> str: """Device specific event to signal a change in device address.""" return f"axis_new_address_{self.config.entry.entry_id}" - # Callbacks - - @callback - def connection_status_callback(self, status: Signal) -> None: - """Handle signals of device connection status. - - This is called on every RTSP keep-alive message. - Only signal state change if state change is true. - """ - - if self.available != (status == Signal.PLAYING): - self.available = not self.available - async_dispatcher_send(self.hass, self.signal_reachable) - @staticmethod async def async_new_address_callback( hass: HomeAssistant, config_entry: ConfigEntry @@ -89,6 +73,7 @@ class AxisHub: """ hub = AxisHub.get_hub(hass, config_entry) hub.config = AxisConfig.from_config_entry(config_entry) + hub.event_source.config_entry = config_entry hub.api.config.host = hub.config.host async_dispatcher_send(hass, hub.signal_new_address) @@ -106,57 +91,19 @@ class AxisHub: sw_version=self.fw_version, ) - async def async_use_mqtt(self, hass: HomeAssistant, component: str) -> None: - """Set up to use MQTT.""" - try: - status = await self.api.vapix.mqtt.get_client_status() - except Unauthorized: - # This means the user has too low privileges - return - if status.status.state == ClientState.ACTIVE: - self.config.entry.async_on_unload( - await mqtt.async_subscribe( - hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message - ) - ) - - @callback - def mqtt_message(self, message: ReceiveMessage) -> None: - """Receive Axis MQTT message.""" - self.disconnect_from_stream() - if 
message.topic.endswith("event/connection"): - return - event = mqtt_json_to_event(message.payload) - self.api.event.handler(event) - # Setup and teardown methods @callback def setup(self) -> None: """Set up the device events.""" self.entity_loader.initialize_platforms() - - self.api.stream.connection_status_callback.append( - self.connection_status_callback - ) - self.api.enable_events() - self.api.stream.start() - - if self.api.vapix.mqtt.supported: - async_when_setup(self.hass, MQTT_DOMAIN, self.async_use_mqtt) - - @callback - def disconnect_from_stream(self) -> None: - """Stop stream.""" - if self.api.stream.state != State.STOPPED: - self.api.stream.connection_status_callback.clear() - self.api.stream.stop() + self.event_source.setup() async def shutdown(self, event: Event) -> None: """Stop the event stream.""" - self.disconnect_from_stream() + self.event_source.teardown() @callback def teardown(self) -> None: """Reset this device to default state.""" - self.disconnect_from_stream() + self.event_source.teardown() diff --git a/homeassistant/components/baf/const.py b/homeassistant/components/baf/const.py index 9876d7ffec3..4d5020bdf02 100644 --- a/homeassistant/components/baf/const.py +++ b/homeassistant/components/baf/const.py @@ -9,7 +9,7 @@ QUERY_INTERVAL = 300 RUN_TIMEOUT = 20 -PRESET_MODE_AUTO = "Auto" +PRESET_MODE_AUTO = "auto" SPEED_COUNT = 7 SPEED_RANGE = (1, SPEED_COUNT) diff --git a/homeassistant/components/baf/fan.py b/homeassistant/components/baf/fan.py index 15c6519747d..6c90e2a53cb 100644 --- a/homeassistant/components/baf/fan.py +++ b/homeassistant/components/baf/fan.py @@ -48,6 +48,7 @@ class BAFFan(BAFEntity, FanEntity): _attr_preset_modes = [PRESET_MODE_AUTO] _attr_speed_count = SPEED_COUNT _attr_name = None + _attr_translation_key = "baf" @callback def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/baf/icons.json b/homeassistant/components/baf/icons.json new file mode 100644 index 00000000000..c91c4cde86a --- /dev/null 
+++ b/homeassistant/components/baf/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "fan": { + "baf": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "mdi:fan-auto" + } + } + } + } + } + } +} diff --git a/homeassistant/components/baf/strings.json b/homeassistant/components/baf/strings.json index 5143b519d27..e2f02a6095e 100644 --- a/homeassistant/components/baf/strings.json +++ b/homeassistant/components/baf/strings.json @@ -26,6 +26,17 @@ "name": "Auto comfort" } }, + "fan": { + "baf": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]" + } + } + } + } + }, "number": { "comfort_min_speed": { "name": "Auto Comfort Minimum Speed" diff --git a/homeassistant/components/bang_olufsen/__init__.py b/homeassistant/components/bang_olufsen/__init__.py index 2488c2e64f5..07b9d0befe1 100644 --- a/homeassistant/components/bang_olufsen/__init__.py +++ b/homeassistant/components/bang_olufsen/__init__.py @@ -4,7 +4,11 @@ from __future__ import annotations from dataclasses import dataclass -from aiohttp.client_exceptions import ClientConnectorError +from aiohttp.client_exceptions import ( + ClientConnectorError, + ClientOSError, + ServerTimeoutError, +) from mozart_api.exceptions import ApiException from mozart_api.mozart_client import MozartClient @@ -44,12 +48,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: model=entry.data[CONF_MODEL], ) - client = MozartClient(host=entry.data[CONF_HOST], websocket_reconnect=True) + client = MozartClient(host=entry.data[CONF_HOST]) - # Check connection and try to initialize it. 
+ # Check API and WebSocket connection try: - await client.get_battery_state(_request_timeout=3) - except (ApiException, ClientConnectorError, TimeoutError) as error: + await client.check_device_connection(True) + except* ( + ClientConnectorError, + ClientOSError, + ServerTimeoutError, + ApiException, + TimeoutError, + ) as error: await client.close_api_client() raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error @@ -61,11 +71,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client, ) - # Check and start WebSocket connection - if not await client.connect_notifications(remote_control=True): - raise ConfigEntryNotReady( - f"Unable to connect to {entry.title} WebSocket notification channel" - ) + # Start WebSocket connection + await client.connect_notifications(remote_control=True, reconnect=True) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index 3c920a99d7f..f2b31293227 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==3.2.1.150.6"], + "requirements": ["mozart-api==3.4.1.8.5"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 935c057efc8..9f55790d711 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -363,7 +363,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): def is_volume_muted(self) -> bool | None: """Boolean if volume is currently muted.""" if self._volume.muted and self._volume.muted.muted: 
- return self._volume.muted.muted + # The any return here is side effect of pydantic v2 compatibility + # This will be fixed in the future. + return self._volume.muted.muted # type: ignore[no-any-return] return None @property diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py index 318bb18772a..7461d7b2a2b 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Mapping -import contextlib import logging from typing import Any @@ -97,7 +96,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): await self._camera.async_arm(True) except TimeoutError as er: - raise HomeAssistantError("Blink failed to arm camera") from er + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_arm", + ) from er self._camera.motion_enabled = True await self.coordinator.async_refresh() @@ -107,7 +109,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): try: await self._camera.async_arm(False) except TimeoutError as er: - raise HomeAssistantError("Blink failed to disarm camera") from er + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_disarm", + ) from er self._camera.motion_enabled = False await self.coordinator.async_refresh() @@ -124,8 +129,14 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera): async def trigger_camera(self) -> None: """Trigger camera to take a snapshot.""" - with contextlib.suppress(TimeoutError): + try: await self._camera.snap_picture() + except TimeoutError as er: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="failed_snap", + ) from er + self.async_write_ha_state() def camera_image( diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index 2260acede1c..2c0be3d972c 100644 --- 
a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -106,16 +106,31 @@ }, "exceptions": { "integration_not_found": { - "message": "Integration \"{target}\" not found in registry" + "message": "Integration \"{target}\" not found in registry." }, "no_path": { "message": "Can't write to directory {target}, no access to path!" }, "cant_write": { - "message": "Can't write to file" + "message": "Can't write to file." }, "not_loaded": { - "message": "{target} is not loaded" + "message": "{target} is not loaded." + }, + "failed_arm": { + "message": "Blink failed to arm camera." + }, + "failed_disarm": { + "message": "Blink failed to disarm camera." + }, + "failed_snap": { + "message": "Blink failed to snap a picture." + }, + "failed_arm_motion": { + "message": "Blink failed to arm camera motion detection." + }, + "failed_disarm_motion": { + "message": "Blink failed to disarm camera motion detection." } }, "issues": { diff --git a/homeassistant/components/blink/switch.py b/homeassistant/components/blink/switch.py index 1bfd257ecbe..ab9b825ded1 100644 --- a/homeassistant/components/blink/switch.py +++ b/homeassistant/components/blink/switch.py @@ -75,7 +75,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity): except TimeoutError as er: raise HomeAssistantError( - "Blink failed to arm camera motion detection" + translation_domain=DOMAIN, + translation_key="failed_arm_motion", ) from er await self.coordinator.async_refresh() @@ -87,7 +88,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity): except TimeoutError as er: raise HomeAssistantError( - "Blink failed to dis-arm camera motion detection" + translation_domain=DOMAIN, + translation_key="failed_disarm_motion", ) from er await self.coordinator.async_refresh() diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index cb6f013dbf8..6c63067a1c1 100644 --- 
a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -934,7 +934,7 @@ class BluesoundPlayer(MediaPlayerEntity): selected_source = items[0] url = f"Play?url={selected_source['url']}&preset_id&image={selected_source['image']}" - if "is_raw_url" in selected_source and selected_source["is_raw_url"]: + if selected_source.get("is_raw_url"): url = selected_source["url"] return await self.send_bluesound_command(url) diff --git a/homeassistant/components/bluetooth/__init__.py b/homeassistant/components/bluetooth/__init__.py index 560fb0663a8..4768d58379a 100644 --- a/homeassistant/components/bluetooth/__init__.py +++ b/homeassistant/components/bluetooth/__init__.py @@ -86,6 +86,7 @@ from .manager import HomeAssistantBluetoothManager from .match import BluetoothCallbackMatcher, IntegrationMatcher from .models import BluetoothCallback, BluetoothChange from .storage import BluetoothStorage +from .util import adapter_title if TYPE_CHECKING: from homeassistant.helpers.typing import ConfigType @@ -332,6 +333,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) from err adapters = await manager.async_get_bluetooth_adapters() details = adapters[adapter] + if entry.title == address: + hass.config_entries.async_update_entry( + entry, title=adapter_title(adapter, details) + ) slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots)) await async_update_device(hass, entry, adapter, details) diff --git a/homeassistant/components/bluetooth/config_flow.py b/homeassistant/components/bluetooth/config_flow.py index 87038d48151..90d2624fb0f 100644 --- a/homeassistant/components/bluetooth/config_flow.py +++ b/homeassistant/components/bluetooth/config_flow.py @@ -12,7 +12,6 @@ from bluetooth_adapters import ( AdapterDetails, adapter_human_name, adapter_model, - adapter_unique_name, get_adapters, ) 
import voluptuous as vol @@ -28,6 +27,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType from . import models from .const import CONF_ADAPTER, CONF_DETAILS, CONF_PASSIVE, DOMAIN +from .util import adapter_title OPTIONS_SCHEMA = vol.Schema( { @@ -47,14 +47,6 @@ def adapter_display_info(adapter: str, details: AdapterDetails) -> str: return f"{name} {manufacturer} {model}" -def adapter_title(adapter: str, details: AdapterDetails) -> str: - """Return the adapter title.""" - unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS]) - model = adapter_model(details) - manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown" - return f"{manufacturer} {model} ({unique_name})" - - class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow for Bluetooth.""" diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index b41c344bdf2..ed1e11d8ddd 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -16,8 +16,8 @@ "requirements": [ "bleak==0.21.1", "bleak-retry-connector==3.5.0", - "bluetooth-adapters==0.18.0", - "bluetooth-auto-recovery==1.4.1", + "bluetooth-adapters==0.19.0", + "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.19.0", "dbus-fast==2.21.1", "habluetooth==2.8.0" diff --git a/homeassistant/components/bluetooth/util.py b/homeassistant/components/bluetooth/util.py index 0faac9a8613..8c7ad13294a 100644 --- a/homeassistant/components/bluetooth/util.py +++ b/homeassistant/components/bluetooth/util.py @@ -2,7 +2,14 @@ from __future__ import annotations -from bluetooth_adapters import BluetoothAdapters +from bluetooth_adapters import ( + ADAPTER_ADDRESS, + ADAPTER_MANUFACTURER, + ADAPTER_PRODUCT, + AdapterDetails, + BluetoothAdapters, + adapter_unique_name, +) from bluetooth_data_tools import monotonic_time_coarse from homeassistant.core import callback @@ -69,3 +76,12 @@ def async_load_history_from_system( 
connectable_loaded_history[address] = service_info return all_loaded_history, connectable_loaded_history + + +@callback +def adapter_title(adapter: str, details: AdapterDetails) -> str: + """Return the adapter title.""" + unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS]) + model = details.get(ADAPTER_PRODUCT, "Unknown") + manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown" + return f"{manufacturer} {model} ({unique_name})" diff --git a/homeassistant/components/bond/config_flow.py b/homeassistant/components/bond/config_flow.py index 45170a0404f..a12d3057258 100644 --- a/homeassistant/components/bond/config_flow.py +++ b/homeassistant/components/bond/config_flow.py @@ -113,7 +113,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN): ): updates[CONF_ACCESS_TOKEN] = token return self.async_update_reload_and_abort( - entry, data={**entry.data, **updates}, reason="already_configured" + entry, + data={**entry.data, **updates}, + reason="already_configured", + reload_even_if_entry_is_unchanged=False, ) self._discovered = {CONF_HOST: host, CONF_NAME: bond_id} diff --git a/homeassistant/components/bring/const.py b/homeassistant/components/bring/const.py index 64a6ec67f85..911c08a835d 100644 --- a/homeassistant/components/bring/const.py +++ b/homeassistant/components/bring/const.py @@ -1,3 +1,11 @@ """Constants for the Bring! 
integration.""" +from typing import Final + DOMAIN = "bring" + +ATTR_SENDER: Final = "sender" +ATTR_ITEM_NAME: Final = "item" +ATTR_NOTIFICATION_TYPE: Final = "message" + +SERVICE_PUSH_NOTIFICATION = "send_message" diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index a757b20a4cc..1c6c3bdeca0 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -5,5 +5,8 @@ "default": "mdi:cart" } } + }, + "services": { + "send_message": "mdi:cellphone-message" } } diff --git a/homeassistant/components/bring/services.yaml b/homeassistant/components/bring/services.yaml new file mode 100644 index 00000000000..98d5c68de13 --- /dev/null +++ b/homeassistant/components/bring/services.yaml @@ -0,0 +1,23 @@ +send_message: + target: + entity: + domain: todo + integration: bring + fields: + message: + example: urgent_message + required: true + default: "going_shopping" + selector: + select: + translation_key: "notification_type_selector" + options: + - "going_shopping" + - "changed_list" + - "shopping_done" + - "urgent_message" + item: + example: Cilantro + required: false + selector: + text: diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index 6d61034bea8..e6df885cbbc 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -38,6 +38,42 @@ }, "setup_authentication_exception": { "message": "Authentication failed for {email}, check your email and password" + }, + "notify_missing_argument_item": { + "message": "Failed to call service {service}. 'URGENT_MESSAGE' requires a value @ data['item']. 
Got None" + }, + "notify_request_failed": { + "message": "Failed to send push notification for bring due to a connection error, try again later" + } + }, + "services": { + "send_message": { + "name": "[%key:component::notify::services::notify::name%]", + "description": "Send a mobile push notification to members of a shared Bring! list.", + "fields": { + "entity_id": { + "name": "List", + "description": "Bring! list whose members (except sender) will be notified." + }, + "message": { + "name": "Notification type", + "description": "Type of push notification to send to list members." + }, + "item": { + "name": "Item (Required if message type `Breaking news` selected)", + "description": "Item name to include in a breaking news message e.g. `Breaking news - Please get cilantro!`" + } + } + } + }, + "selector": { + "notification_type_selector": { + "options": { + "going_shopping": "I'm going shopping! - Last chance for adjustments", + "changed_list": "List changed - Check it out", + "shopping_done": "Shopping done - you can relax", + "urgent_message": "Breaking news - Please get `item`!" 
+ } } } } diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index e631dc32951..5eabcc01553 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -6,7 +6,8 @@ from typing import TYPE_CHECKING import uuid from bring_api.exceptions import BringRequestException -from bring_api.types import BringItem, BringItemOperation +from bring_api.types import BringItem, BringItemOperation, BringNotificationType +import voluptuous as vol from homeassistant.components.todo import ( TodoItem, @@ -16,11 +17,18 @@ from homeassistant.components.todo import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv, entity_platform +from homeassistant.helpers.config_validation import make_entity_service_schema from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import ( + ATTR_ITEM_NAME, + ATTR_NOTIFICATION_TYPE, + DOMAIN, + SERVICE_PUSH_NOTIFICATION, +) from .coordinator import BringData, BringDataUpdateCoordinator @@ -46,6 +54,21 @@ async def async_setup_entry( for bring_list in coordinator.data.values() ) + platform = entity_platform.async_get_current_platform() + + platform.async_register_entity_service( + SERVICE_PUSH_NOTIFICATION, + make_entity_service_schema( + { + vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( + vol.Upper, cv.enum(BringNotificationType) + ), + vol.Optional(ATTR_ITEM_NAME): cv.string, + } + ), + "async_send_message", + ) + class BringTodoListEntity( CoordinatorEntity[BringDataUpdateCoordinator], TodoListEntity @@ -231,3 +254,26 @@ class BringTodoListEntity( ) from e await self.coordinator.async_refresh() + + async 
def async_send_message( + self, + message: BringNotificationType, + item: str | None = None, + ) -> None: + """Send a push notification to members of a shared bring list.""" + + try: + await self.coordinator.bring.notify(self._list_uuid, message, item or None) + except BringRequestException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="notify_request_failed", + ) from e + except ValueError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="notify_missing_argument_item", + translation_placeholders={ + "service": f"{DOMAIN}.{SERVICE_PUSH_NOTIFICATION}", + }, + ) from e diff --git a/homeassistant/components/circuit/__init__.py b/homeassistant/components/circuit/__init__.py index f71babad3d5..7e7d0eda76e 100644 --- a/homeassistant/components/circuit/__init__.py +++ b/homeassistant/components/circuit/__init__.py @@ -5,6 +5,7 @@ import voluptuous as vol from homeassistant.const import CONF_NAME, CONF_URL, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, discovery +import homeassistant.helpers.issue_registry as ir from homeassistant.helpers.typing import ConfigType DOMAIN = "circuit" @@ -26,6 +27,17 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Unify Circuit component.""" + ir.async_create_issue( + hass, + DOMAIN, + "service_removal", + breaks_in_ha_version="2024.7.0", + is_fixable=False, + is_persistent=True, + severity=ir.IssueSeverity.WARNING, + translation_key="service_removal", + translation_placeholders={"integration": "Unify Circuit", "domain": DOMAIN}, + ) webhooks = config[DOMAIN][CONF_WEBHOOK] for webhook_conf in webhooks: diff --git a/homeassistant/components/circuit/strings.json b/homeassistant/components/circuit/strings.json new file mode 100644 index 00000000000..b9cb852d5b9 --- /dev/null +++ b/homeassistant/components/circuit/strings.json @@ -0,0 +1,8 @@ 
+{ + "issues": { + "service_removal": { + "title": "The {integration} integration is being removed", + "description": "The {integration} integration will be removed, as the service is no longer maintained.\n\n\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." + } + } +} diff --git a/homeassistant/components/cloud/__init__.py b/homeassistant/components/cloud/__init__.py index 80f9d9f9368..2552fe4bf5c 100644 --- a/homeassistant/components/cloud/__init__.py +++ b/homeassistant/components/cloud/__init__.py @@ -7,11 +7,14 @@ from collections.abc import Awaitable, Callable from datetime import datetime, timedelta from enum import Enum from typing import cast +from urllib.parse import quote_plus, urljoin from hass_nabucasa import Cloud import voluptuous as vol -from homeassistant.components import alexa, google_assistant +from homeassistant.components import alexa, google_assistant, http +from homeassistant.components.auth import STRICT_CONNECTION_URL +from homeassistant.components.http.auth import async_sign_path from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry from homeassistant.const import ( CONF_DESCRIPTION, @@ -21,8 +24,21 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, Platform, ) -from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.core import ( + Event, + HassJob, + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) +from homeassistant.exceptions import ( + HomeAssistantError, + ServiceValidationError, + Unauthorized, + UnknownUser, +) from homeassistant.helpers import config_validation as cv, entityfilter from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.discovery import async_load_platform @@ -31,6 +47,7 @@ from homeassistant.helpers.dispatcher import ( 
async_dispatcher_send, ) from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass @@ -265,18 +282,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown) _remote_handle_prefs_updated(cloud) - - async def _service_handler(service: ServiceCall) -> None: - """Handle service for cloud.""" - if service.service == SERVICE_REMOTE_CONNECT: - await prefs.async_update(remote_enabled=True) - elif service.service == SERVICE_REMOTE_DISCONNECT: - await prefs.async_update(remote_enabled=False) - - async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler) - async_register_admin_service( - hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler - ) + _setup_services(hass, prefs) async def async_startup_repairs(_: datetime) -> None: """Create repair issues after startup.""" @@ -395,3 +401,67 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +@callback +def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None: + """Set up services for cloud component.""" + + async def _service_handler(service: ServiceCall) -> None: + """Handle service for cloud.""" + if service.service == SERVICE_REMOTE_CONNECT: + await prefs.async_update(remote_enabled=True) + elif service.service == SERVICE_REMOTE_DISCONNECT: + await prefs.async_update(remote_enabled=False) + + async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler) + async_register_admin_service( + hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, 
_service_handler + ) + + async def create_temporary_strict_connection_url( + call: ServiceCall, + ) -> ServiceResponse: + """Create a strict connection url and return it.""" + # Copied form homeassistant/helpers/service.py#_async_admin_handler + # as the helper supports no responses yet + if call.context.user_id: + user = await hass.auth.async_get_user(call.context.user_id) + if user is None: + raise UnknownUser(context=call.context) + if not user.is_admin: + raise Unauthorized(context=call.context) + + if prefs.strict_connection is http.const.StrictConnectionMode.DISABLED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="strict_connection_not_enabled", + ) + + try: + url = get_url(hass, require_cloud=True) + except NoURLAvailableError as ex: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_url_available", + ) from ex + + path = async_sign_path( + hass, + STRICT_CONNECTION_URL, + timedelta(hours=1), + use_content_user=True, + ) + url = urljoin(url, path) + + return { + "url": f"https://login.home-assistant.io?u={quote_plus(url)}", + "direct_url": url, + } + + hass.services.async_register( + DOMAIN, + "create_temporary_strict_connection_url", + create_temporary_strict_connection_url, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index 01c8de77156..c4d1c1dec60 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -250,6 +250,7 @@ class CloudClient(Interface): "enabled": self._prefs.remote_enabled, "instance_domain": self.cloud.remote.instance_domain, "alias": self.cloud.remote.alias, + "strict_connection": self._prefs.strict_connection, }, "version": HA_VERSION, "instance_id": self.prefs.instance_id, diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 2c58dd57340..8b68eefc443 100644 --- 
a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -33,6 +33,7 @@ PREF_GOOGLE_SETTINGS_VERSION = "google_settings_version" PREF_TTS_DEFAULT_VOICE = "tts_default_voice" PREF_GOOGLE_CONNECTED = "google_connected" PREF_REMOTE_ALLOW_REMOTE_ENABLE = "remote_allow_remote_enable" +PREF_STRICT_CONNECTION = "strict_connection" DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "JennyNeural") DEFAULT_DISABLE_2FA = False DEFAULT_ALEXA_REPORT_STATE = True diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index b577e9de0d4..29185191a20 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -19,7 +19,7 @@ from hass_nabucasa.const import STATE_DISCONNECTED from hass_nabucasa.voice import TTS_VOICES import voluptuous as vol -from homeassistant.components import websocket_api +from homeassistant.components import http, websocket_api from homeassistant.components.alexa import ( entities as alexa_entities, errors as alexa_errors, @@ -46,6 +46,7 @@ from .const import ( PREF_GOOGLE_REPORT_STATE, PREF_GOOGLE_SECURE_DEVICES_PIN, PREF_REMOTE_ALLOW_REMOTE_ENABLE, + PREF_STRICT_CONNECTION, PREF_TTS_DEFAULT_VOICE, REQUEST_TIMEOUT, ) @@ -452,6 +453,9 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: vol.Coerce(tuple), validate_language_voice ), vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, + vol.Optional(PREF_STRICT_CONNECTION): vol.Coerce( + http.const.StrictConnectionMode + ), } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/icons.json b/homeassistant/components/cloud/icons.json index 06ee7eb2f19..1a8593388b4 100644 --- a/homeassistant/components/cloud/icons.json +++ b/homeassistant/components/cloud/icons.json @@ -1,5 +1,6 @@ { "services": { + "create_temporary_strict_connection_url": "mdi:login-variant", "remote_connect": "mdi:cloud", "remote_disconnect": "mdi:cloud-off" } diff --git 
a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 49a3fc0bf5c..0d2ee546ad8 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -3,7 +3,7 @@ "name": "Home Assistant Cloud", "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"], "codeowners": ["@home-assistant/cloud"], - "dependencies": ["http", "repairs", "webhook"], + "dependencies": ["auth", "http", "repairs", "webhook"], "documentation": "https://www.home-assistant.io/integrations/cloud", "integration_type": "system", "iot_class": "cloud_push", diff --git a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index af4e68194d6..9fce615128b 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -10,7 +10,7 @@ from hass_nabucasa.voice import MAP_VOICE from homeassistant.auth.const import GROUP_ID_ADMIN from homeassistant.auth.models import User -from homeassistant.components import webhook +from homeassistant.components import http, webhook from homeassistant.components.google_assistant.http import ( async_get_users as async_get_google_assistant_users, ) @@ -44,6 +44,7 @@ from .const import ( PREF_INSTANCE_ID, PREF_REMOTE_ALLOW_REMOTE_ENABLE, PREF_REMOTE_DOMAIN, + PREF_STRICT_CONNECTION, PREF_TTS_DEFAULT_VOICE, PREF_USERNAME, ) @@ -176,6 +177,7 @@ class CloudPreferences: google_settings_version: int | UndefinedType = UNDEFINED, google_connected: bool | UndefinedType = UNDEFINED, remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + strict_connection: http.const.StrictConnectionMode | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -195,6 +197,7 @@ class CloudPreferences: (PREF_REMOTE_DOMAIN, remote_domain), (PREF_GOOGLE_CONNECTED, google_connected), (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_STRICT_CONNECTION, strict_connection), 
): if value is not UNDEFINED: prefs[key] = value @@ -242,6 +245,7 @@ class CloudPreferences: PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin, PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable, PREF_TTS_DEFAULT_VOICE: self.tts_default_voice, + PREF_STRICT_CONNECTION: self.strict_connection, } @property @@ -358,6 +362,17 @@ class CloudPreferences: """ return self._prefs.get(PREF_TTS_DEFAULT_VOICE, DEFAULT_TTS_DEFAULT_VOICE) # type: ignore[no-any-return] + @property + def strict_connection(self) -> http.const.StrictConnectionMode: + """Return the strict connection mode.""" + mode = self._prefs.get( + PREF_STRICT_CONNECTION, http.const.StrictConnectionMode.DISABLED + ) + + if not isinstance(mode, http.const.StrictConnectionMode): + mode = http.const.StrictConnectionMode(mode) + return mode # type: ignore[no-any-return] + async def get_cloud_user(self) -> str: """Return ID of Home Assistant Cloud system user.""" user = await self._load_cloud_user() @@ -415,4 +430,5 @@ class CloudPreferences: PREF_REMOTE_DOMAIN: None, PREF_REMOTE_ALLOW_REMOTE_ENABLE: True, PREF_USERNAME: username, + PREF_STRICT_CONNECTION: http.const.StrictConnectionMode.DISABLED, } diff --git a/homeassistant/components/cloud/strings.json b/homeassistant/components/cloud/strings.json index 16a82a27c1a..1fec87235da 100644 --- a/homeassistant/components/cloud/strings.json +++ b/homeassistant/components/cloud/strings.json @@ -5,6 +5,14 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, + "exceptions": { + "strict_connection_not_enabled": { + "message": "Strict connection is not enabled for cloud requests" + }, + "no_url_available": { + "message": "No cloud URL available.\nPlease make sure you have a working Remote UI."
+ } + }, "system_health": { "info": { "can_reach_cert_server": "Reach Certificate Server", @@ -73,6 +81,10 @@ } }, "services": { + "create_temporary_strict_connection_url": { + "name": "Create a temporary strict connection URL", + "description": "Create a temporary strict connection URL, which can be used to login on another device." + }, "remote_connect": { "name": "Remote connect", "description": "Makes the instance UI accessible from outside of the local network by using Home Assistant Cloud." diff --git a/homeassistant/components/cloud/util.py b/homeassistant/components/cloud/util.py new file mode 100644 index 00000000000..3e055851fff --- /dev/null +++ b/homeassistant/components/cloud/util.py @@ -0,0 +1,15 @@ +"""Cloud util functions.""" + +from hass_nabucasa import Cloud + +from homeassistant.components import http +from homeassistant.core import HomeAssistant + +from .client import CloudClient +from .const import DOMAIN + + +def get_strict_connection_mode(hass: HomeAssistant) -> http.const.StrictConnectionMode: + """Get the strict connection mode.""" + cloud: Cloud[CloudClient] = hass.data[DOMAIN] + return cloud.client.prefs.strict_connection diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index d93ec349bba..b9264d16f69 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -4,7 +4,9 @@ "codeowners": ["@chemelli74"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/comelit", + "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], + "quality_scale": "silver", "requirements": ["aiocomelit==0.9.0"] } diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index b8d195fcb05..86a13de1ac8 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -30,6 +30,7 @@ from 
homeassistant.helpers.update_coordinator import ( ) from .const import ( + API_RETRY_TIMES, CONF_ACCOUNT, CONF_CONFIG_LISTENER, CONF_CONTROLLER_UNIQUE_ID, @@ -47,6 +48,17 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER] +async def call_c4_api_retry(func, *func_args): + """Call C4 API function and retry on failure.""" + for i in range(API_RETRY_TIMES): + try: + return await func(*func_args) + except client_exceptions.ClientError as exception: + _LOGGER.error("Error connecting to Control4 account API: %s", exception) + if i == API_RETRY_TIMES - 1: + raise ConfigEntryNotReady(exception) from exception + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Control4 from a config entry.""" hass.data.setdefault(DOMAIN, {}) @@ -74,18 +86,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: controller_unique_id = config[CONF_CONTROLLER_UNIQUE_ID] entry_data[CONF_CONTROLLER_UNIQUE_ID] = controller_unique_id - director_token_dict = await account.getDirectorBearerToken(controller_unique_id) - director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False) + director_token_dict = await call_c4_api_retry( + account.getDirectorBearerToken, controller_unique_id + ) + director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False) director = C4Director( config[CONF_HOST], director_token_dict[CONF_TOKEN], director_session ) entry_data[CONF_DIRECTOR] = director - # Add Control4 controller to device registry - controller_href = (await account.getAccountControllers())["href"] - entry_data[CONF_DIRECTOR_SW_VERSION] = await account.getControllerOSVersion( - controller_href + controller_href = (await call_c4_api_retry(account.getAccountControllers))["href"] + entry_data[CONF_DIRECTOR_SW_VERSION] = await call_c4_api_retry( + account.getControllerOSVersion, controller_href ) _, model, mac_address = controller_unique_id.split("_", 3) diff --git 
a/homeassistant/components/control4/const.py b/homeassistant/components/control4/const.py index f8d939e1ac5..57074c00108 100644 --- a/homeassistant/components/control4/const.py +++ b/homeassistant/components/control4/const.py @@ -5,6 +5,8 @@ DOMAIN = "control4" DEFAULT_SCAN_INTERVAL = 5 MIN_SCAN_INTERVAL = 1 +API_RETRY_TIMES = 5 + CONF_ACCOUNT = "account" CONF_DIRECTOR = "director" CONF_DIRECTOR_SW_VERSION = "director_sw_version" diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 8ee27986bb8..82e2adca680 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"] + "requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.24"] } diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index 0d77b997e82..b8abd0a9919 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -15,7 +15,7 @@ "quality_scale": "internal", "requirements": [ "aiodhcpwatcher==1.0.0", - "aiodiscover==2.0.0", + "aiodiscover==2.1.0", "cached_ipaddress==0.3.0" ] } diff --git a/homeassistant/components/drop_connect/manifest.json b/homeassistant/components/drop_connect/manifest.json index 5df34fce561..ed34767d6e0 100644 --- a/homeassistant/components/drop_connect/manifest.json +++ b/homeassistant/components/drop_connect/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/drop_connect", "iot_class": "local_push", "mqtt": ["drop_connect/discovery/#"], - "requirements": ["dropmqttapi==1.0.2"] + "requirements": ["dropmqttapi==1.0.3"] } diff --git a/homeassistant/components/dwd_weather_warnings/__init__.py 
b/homeassistant/components/dwd_weather_warnings/__init__.py index 275d47d15ca..9cf73a90a73 100644 --- a/homeassistant/components/dwd_weather_warnings/__init__.py +++ b/homeassistant/components/dwd_weather_warnings/__init__.py @@ -2,23 +2,16 @@ from __future__ import annotations -from dwdwfsapi import DwdWeatherWarningsAPI - from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import CONF_REGION_IDENTIFIER, DOMAIN, PLATFORMS +from .const import DOMAIN, PLATFORMS from .coordinator import DwdWeatherWarningsCoordinator async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" - region_identifier: str = entry.data[CONF_REGION_IDENTIFIER] - - # Initialize the API and coordinator. - api = await hass.async_add_executor_job(DwdWeatherWarningsAPI, region_identifier) - coordinator = DwdWeatherWarningsCoordinator(hass, api) - + coordinator = DwdWeatherWarningsCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator diff --git a/homeassistant/components/dwd_weather_warnings/config_flow.py b/homeassistant/components/dwd_weather_warnings/config_flow.py index 5076dbae187..f148f4e05ac 100644 --- a/homeassistant/components/dwd_weather_warnings/config_flow.py +++ b/homeassistant/components/dwd_weather_warnings/config_flow.py @@ -8,9 +8,15 @@ from dwdwfsapi import DwdWeatherWarningsAPI import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig -from .const import CONF_REGION_IDENTIFIER, DOMAIN +from .const import CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, DOMAIN +from .exceptions import EntityNotFoundError +from .util import get_position_data + +EXCLUSIVE_OPTIONS = 
(CONF_REGION_IDENTIFIER, CONF_REGION_DEVICE_TRACKER) class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN): @@ -25,27 +31,70 @@ class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict = {} if user_input is not None: - region_identifier = user_input[CONF_REGION_IDENTIFIER] + # Check, if either CONF_REGION_IDENTIFIER or CONF_GPS_TRACKER has been set. + if all(k not in user_input for k in EXCLUSIVE_OPTIONS): + errors["base"] = "no_identifier" + elif all(k in user_input for k in EXCLUSIVE_OPTIONS): + errors["base"] = "ambiguous_identifier" + elif CONF_REGION_IDENTIFIER in user_input: + # Validate region identifier using the API + identifier = user_input[CONF_REGION_IDENTIFIER] - # Validate region identifier using the API - if not await self.hass.async_add_executor_job( - DwdWeatherWarningsAPI, region_identifier - ): - errors["base"] = "invalid_identifier" + if not await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, identifier + ): + errors["base"] = "invalid_identifier" - if not errors: - # Set the unique ID for this config entry. - await self.async_set_unique_id(region_identifier) - self._abort_if_unique_id_configured() + if not errors: + # Set the unique ID for this config entry. 
+ await self.async_set_unique_id(identifier) + self._abort_if_unique_id_configured() - return self.async_create_entry(title=region_identifier, data=user_input) + return self.async_create_entry(title=identifier, data=user_input) + else: # CONF_REGION_DEVICE_TRACKER + device_tracker = user_input[CONF_REGION_DEVICE_TRACKER] + registry = er.async_get(self.hass) + entity_entry = registry.async_get(device_tracker) + + if entity_entry is None: + errors["base"] = "entity_not_found" + else: + try: + position = get_position_data(self.hass, entity_entry.id) + except EntityNotFoundError: + errors["base"] = "entity_not_found" + except AttributeError: + errors["base"] = "attribute_not_found" + else: + # Validate position using the API + if not await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, position + ): + errors["base"] = "invalid_identifier" + + # Position is valid here, because the API call was successful. + if not errors and position is not None and entity_entry is not None: + # Set the unique ID for this config entry. + await self.async_set_unique_id(entity_entry.id) + self._abort_if_unique_id_configured() + + # Replace entity ID with registry ID for more stability. 
+ user_input[CONF_REGION_DEVICE_TRACKER] = entity_entry.id + + return self.async_create_entry( + title=device_tracker.removeprefix("device_tracker."), + data=user_input, + ) return self.async_show_form( step_id="user", errors=errors, data_schema=vol.Schema( { - vol.Required(CONF_REGION_IDENTIFIER): cv.string, + vol.Optional(CONF_REGION_IDENTIFIER): cv.string, + vol.Optional(CONF_REGION_DEVICE_TRACKER): EntitySelector( + EntitySelectorConfig(domain="device_tracker") + ), } ), ) diff --git a/homeassistant/components/dwd_weather_warnings/const.py b/homeassistant/components/dwd_weather_warnings/const.py index 75969dee119..4f0a6767660 100644 --- a/homeassistant/components/dwd_weather_warnings/const.py +++ b/homeassistant/components/dwd_weather_warnings/const.py @@ -14,6 +14,7 @@ DOMAIN: Final = "dwd_weather_warnings" CONF_REGION_NAME: Final = "region_name" CONF_REGION_IDENTIFIER: Final = "region_identifier" +CONF_REGION_DEVICE_TRACKER: Final = "region_device_tracker" ATTR_REGION_NAME: Final = "region_name" ATTR_REGION_ID: Final = "region_id" diff --git a/homeassistant/components/dwd_weather_warnings/coordinator.py b/homeassistant/components/dwd_weather_warnings/coordinator.py index a1232697130..465a7c09750 100644 --- a/homeassistant/components/dwd_weather_warnings/coordinator.py +++ b/homeassistant/components/dwd_weather_warnings/coordinator.py @@ -4,23 +4,79 @@ from __future__ import annotations from dwdwfsapi import DwdWeatherWarningsAPI +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import location -from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER +from .const import ( + CONF_REGION_DEVICE_TRACKER, + CONF_REGION_IDENTIFIER, + DEFAULT_SCAN_INTERVAL, + DOMAIN, + LOGGER, +) +from .exceptions import EntityNotFoundError +from 
.util import get_position_data class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]): """Custom coordinator for the dwd_weather_warnings integration.""" - def __init__(self, hass: HomeAssistant, api: DwdWeatherWarningsAPI) -> None: + config_entry: ConfigEntry + api: DwdWeatherWarningsAPI + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize the dwd_weather_warnings coordinator.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL ) - self.api = api + self._device_tracker = None + self._previous_position = None + + async def async_config_entry_first_refresh(self) -> None: + """Perform first refresh.""" + if region_identifier := self.config_entry.data.get(CONF_REGION_IDENTIFIER): + self.api = await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, region_identifier + ) + else: + self._device_tracker = self.config_entry.data.get( + CONF_REGION_DEVICE_TRACKER + ) + + await super().async_config_entry_first_refresh() async def _async_update_data(self) -> None: """Get the latest data from the DWD Weather Warnings API.""" - await self.hass.async_add_executor_job(self.api.update) + if self._device_tracker: + try: + position = get_position_data(self.hass, self._device_tracker) + except (EntityNotFoundError, AttributeError) as err: + raise UpdateFailed(f"Error fetching position: {repr(err)}") from err + + distance = None + if self._previous_position is not None: + distance = location.distance( + self._previous_position[0], + self._previous_position[1], + position[0], + position[1], + ) + + if distance is None or distance > 50: + # Only create a new object on the first update + # or when the distance to the previous position + # changes by more than 50 meters (to take GPS + # inaccuracy into account). + self.api = await self.hass.async_add_executor_job( + DwdWeatherWarningsAPI, position + ) + else: + # Otherwise update the API to check for new warnings. 
+ await self.hass.async_add_executor_job(self.api.update) + + self._previous_position = position + else: + await self.hass.async_add_executor_job(self.api.update) diff --git a/homeassistant/components/dwd_weather_warnings/exceptions.py b/homeassistant/components/dwd_weather_warnings/exceptions.py new file mode 100644 index 00000000000..cd61cfa6bae --- /dev/null +++ b/homeassistant/components/dwd_weather_warnings/exceptions.py @@ -0,0 +1,7 @@ +"""Exceptions for the dwd_weather_warnings integration.""" + +from homeassistant.exceptions import HomeAssistantError + + +class EntityNotFoundError(HomeAssistantError): + """When a referenced entity was not found.""" diff --git a/homeassistant/components/dwd_weather_warnings/sensor.py b/homeassistant/components/dwd_weather_warnings/sensor.py index d3e3b4a3772..d62c0f4f192 100644 --- a/homeassistant/components/dwd_weather_warnings/sensor.py +++ b/homeassistant/components/dwd_weather_warnings/sensor.py @@ -11,6 +11,8 @@ Wetterwarnungen (Stufe 1) from __future__ import annotations +from typing import Any + from homeassistant.components.sensor import SensorEntity, SensorEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -93,29 +95,27 @@ class DwdWeatherWarningsSensor( entry_type=DeviceEntryType.SERVICE, ) - self.api = coordinator.api - @property - def native_value(self): + def native_value(self) -> int | None: """Return the state of the sensor.""" if self.entity_description.key == CURRENT_WARNING_SENSOR: - return self.api.current_warning_level + return self.coordinator.api.current_warning_level - return self.api.expected_warning_level + return self.coordinator.api.expected_warning_level @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the sensor.""" data = { - ATTR_REGION_NAME: self.api.warncell_name, - ATTR_REGION_ID: self.api.warncell_id, - ATTR_LAST_UPDATE: 
self.api.last_update, + ATTR_REGION_NAME: self.coordinator.api.warncell_name, + ATTR_REGION_ID: self.coordinator.api.warncell_id, + ATTR_LAST_UPDATE: self.coordinator.api.last_update, } if self.entity_description.key == CURRENT_WARNING_SENSOR: - searched_warnings = self.api.current_warnings + searched_warnings = self.coordinator.api.current_warnings else: - searched_warnings = self.api.expected_warnings + searched_warnings = self.coordinator.api.expected_warnings data[ATTR_WARNING_COUNT] = len(searched_warnings) @@ -142,4 +142,4 @@ class DwdWeatherWarningsSensor( @property def available(self) -> bool: """Could the device be accessed during the last update call.""" - return self.api.data_valid + return self.coordinator.api.data_valid diff --git a/homeassistant/components/dwd_weather_warnings/strings.json b/homeassistant/components/dwd_weather_warnings/strings.json index aa460dcc6d5..3f421d338a7 100644 --- a/homeassistant/components/dwd_weather_warnings/strings.json +++ b/homeassistant/components/dwd_weather_warnings/strings.json @@ -2,17 +2,22 @@ "config": { "step": { "user": { - "description": "To identify the desired region, the warncell ID / name is required.", + "description": "To identify the desired region, either the warncell ID / name or device tracker is required. The provided device tracker has to contain the attributes 'latitude' and 'longitude'.", "data": { - "region_identifier": "Warncell ID or name" + "region_identifier": "Warncell ID or name", + "region_device_tracker": "Device tracker entity" } } }, "error": { - "invalid_identifier": "The specified region identifier is invalid." 
+ "no_identifier": "Either the region identifier or device tracker is required.", + "ambiguous_identifier": "The region identifier and device tracker can not be specified together.", + "invalid_identifier": "The specified region identifier / device tracker is invalid.", + "entity_not_found": "The specified device tracker entity was not found.", + "attribute_not_found": "The required `latitude` or `longitude` attribute was not found in the specified device tracker." }, "abort": { - "already_configured": "Warncell ID / name is already configured.", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_identifier": "[%key:component::dwd_weather_warnings::config::error::invalid_identifier%]" } }, diff --git a/homeassistant/components/dwd_weather_warnings/util.py b/homeassistant/components/dwd_weather_warnings/util.py new file mode 100644 index 00000000000..730ebf4b71e --- /dev/null +++ b/homeassistant/components/dwd_weather_warnings/util.py @@ -0,0 +1,39 @@ +"""Util functions for the dwd_weather_warnings integration.""" + +from __future__ import annotations + +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .exceptions import EntityNotFoundError + + +def get_position_data( + hass: HomeAssistant, registry_id: str +) -> tuple[float, float] | None: + """Extract longitude and latitude from a device tracker.""" + registry = er.async_get(hass) + registry_entry = registry.async_get(registry_id) + if registry_entry is None: + raise EntityNotFoundError(f"Failed to find registry entry {registry_id}") + + entity = hass.states.get(registry_entry.entity_id) + if entity is None: + raise EntityNotFoundError(f"Failed to find entity {registry_entry.entity_id}") + + latitude = entity.attributes.get(ATTR_LATITUDE) + if not latitude: + raise AttributeError( + f"Failed to find attribute '{ATTR_LATITUDE}' in 
{registry_entry.entity_id}", + ATTR_LATITUDE, + ) + + longitude = entity.attributes.get(ATTR_LONGITUDE) + if not longitude: + raise AttributeError( + f"Failed to find attribute '{ATTR_LONGITUDE}' in {registry_entry.entity_id}", + ATTR_LONGITUDE, + ) + + return (latitude, longitude) diff --git a/homeassistant/components/ecobee/__init__.py b/homeassistant/components/ecobee/__init__.py index 8083d0efcb4..6f032fbaae9 100644 --- a/homeassistant/components/ecobee/__init__.py +++ b/homeassistant/components/ecobee/__init__.py @@ -73,6 +73,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + # The legacy Ecobee notify.notify service is deprecated + # was with HA Core 2024.5.0 and will be removed with HA core 2024.11.0 hass.async_create_task( discovery.async_load_platform( hass, @@ -97,7 +99,7 @@ class EcobeeData: ) -> None: """Initialize the Ecobee data object.""" self._hass = hass - self._entry = entry + self.entry = entry self.ecobee = Ecobee( config={ECOBEE_API_KEY: api_key, ECOBEE_REFRESH_TOKEN: refresh_token} ) @@ -117,7 +119,7 @@ class EcobeeData: _LOGGER.debug("Refreshing ecobee tokens and updating config entry") if await self._hass.async_add_executor_job(self.ecobee.refresh_tokens): self._hass.config_entries.async_update_entry( - self._entry, + self.entry, data={ CONF_API_KEY: self.ecobee.config[ECOBEE_API_KEY], CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN], diff --git a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index e341f4176ad..11675c0bf61 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -12,7 +12,10 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_LOW, FAN_AUTO, FAN_ON, + PRESET_AWAY, + PRESET_HOME, PRESET_NONE, + PRESET_SLEEP, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -60,9 +63,6 @@ PRESET_TEMPERATURE = "temp" 
PRESET_VACATION = "vacation" PRESET_HOLD_NEXT_TRANSITION = "next_transition" PRESET_HOLD_INDEFINITE = "indefinite" -AWAY_MODE = "awayMode" -PRESET_HOME = "home" -PRESET_SLEEP = "sleep" HAS_HEAT_PUMP = "hasHeatPump" DEFAULT_MIN_HUMIDITY = 15 @@ -103,6 +103,13 @@ ECOBEE_HVAC_ACTION_TO_HASS = { "compWaterHeater": None, } +ECOBEE_TO_HASS_PRESET = { + "Away": PRESET_AWAY, + "Home": PRESET_HOME, + "Sleep": PRESET_SLEEP, +} +HASS_TO_ECOBEE_PRESET = {v: k for k, v in ECOBEE_TO_HASS_PRESET.items()} + PRESET_TO_ECOBEE_HOLD = { PRESET_HOLD_NEXT_TRANSITION: "nextTransition", PRESET_HOLD_INDEFINITE: "indefinite", @@ -348,10 +355,6 @@ class Thermostat(ClimateEntity): self._attr_hvac_modes.insert(0, HVACMode.HEAT_COOL) self._attr_hvac_modes.append(HVACMode.OFF) - self._preset_modes = { - comfort["climateRef"]: comfort["name"] - for comfort in self.thermostat["program"]["climates"] - } self.update_without_throttle = False async def async_update(self) -> None: @@ -474,7 +477,7 @@ class Thermostat(ClimateEntity): return self.thermostat["runtime"]["desiredFanMode"] @property - def preset_mode(self): + def preset_mode(self) -> str | None: """Return current preset mode.""" events = self.thermostat["events"] for event in events: @@ -487,8 +490,8 @@ class Thermostat(ClimateEntity): ): return PRESET_AWAY_INDEFINITELY - if event["holdClimateRef"] in self._preset_modes: - return self._preset_modes[event["holdClimateRef"]] + if name := self.comfort_settings.get(event["holdClimateRef"]): + return ECOBEE_TO_HASS_PRESET.get(name, name) # Any hold not based on a climate is a temp hold return PRESET_TEMPERATURE @@ -499,7 +502,12 @@ class Thermostat(ClimateEntity): self.vacation = event["name"] return PRESET_VACATION - return self._preset_modes[self.thermostat["program"]["currentClimateRef"]] + if name := self.comfort_settings.get( + self.thermostat["program"]["currentClimateRef"] + ): + return ECOBEE_TO_HASS_PRESET.get(name, name) + + return None @property def hvac_mode(self): @@ -545,14 +553,14 
@@ class Thermostat(ClimateEntity): return HVACAction.IDLE @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return device specific state attributes.""" status = self.thermostat["equipmentStatus"] return { "fan": self.fan, - "climate_mode": self._preset_modes[ + "climate_mode": self.comfort_settings.get( self.thermostat["program"]["currentClimateRef"] - ], + ), "equipment_running": status, "fan_min_on_time": self.settings["fanMinOnTime"], } @@ -577,6 +585,8 @@ class Thermostat(ClimateEntity): def set_preset_mode(self, preset_mode: str) -> None: """Activate a preset.""" + preset_mode = HASS_TO_ECOBEE_PRESET.get(preset_mode, preset_mode) + if preset_mode == self.preset_mode: return @@ -605,25 +615,14 @@ class Thermostat(ClimateEntity): elif preset_mode == PRESET_NONE: self.data.ecobee.resume_program(self.thermostat_index) - elif preset_mode in self.preset_modes: - climate_ref = None - - for comfort in self.thermostat["program"]["climates"]: - if comfort["name"] == preset_mode: - climate_ref = comfort["climateRef"] + else: + for climate_ref, name in self.comfort_settings.items(): + if name == preset_mode: + preset_mode = climate_ref break - - if climate_ref is not None: - self.data.ecobee.set_climate_hold( - self.thermostat_index, - climate_ref, - self.hold_preference(), - self.hold_hours(), - ) else: _LOGGER.warning("Received unknown preset mode: %s", preset_mode) - else: self.data.ecobee.set_climate_hold( self.thermostat_index, preset_mode, @@ -632,11 +631,22 @@ class Thermostat(ClimateEntity): ) @property - def preset_modes(self): + def preset_modes(self) -> list[str] | None: """Return available preset modes.""" # Return presets provided by the ecobee API, and an indefinite away # preset which we handle separately in set_preset_mode(). 
- return [*self._preset_modes.values(), PRESET_AWAY_INDEFINITELY] + return [ + ECOBEE_TO_HASS_PRESET.get(name, name) + for name in self.comfort_settings.values() + ] + [PRESET_AWAY_INDEFINITELY] + + @property + def comfort_settings(self) -> dict[str, str]: + """Return ecobee API comfort settings.""" + return { + comfort["climateRef"]: comfort["name"] + for comfort in self.thermostat["program"]["climates"] + } def set_auto_temp_hold(self, heat_temp, cool_temp): """Set temperature hold in auto mode.""" diff --git a/homeassistant/components/ecobee/const.py b/homeassistant/components/ecobee/const.py index e20acb5cfca..0eed0ab67f9 100644 --- a/homeassistant/components/ecobee/const.py +++ b/homeassistant/components/ecobee/const.py @@ -46,6 +46,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.HUMIDIFIER, + Platform.NOTIFY, Platform.NUMBER, Platform.SENSOR, Platform.WEATHER, diff --git a/homeassistant/components/ecobee/manifest.json b/homeassistant/components/ecobee/manifest.json index f3f5b59a36f..7e461230600 100644 --- a/homeassistant/components/ecobee/manifest.json +++ b/homeassistant/components/ecobee/manifest.json @@ -3,6 +3,7 @@ "name": "ecobee", "codeowners": [], "config_flow": true, + "dependencies": ["http", "repairs"], "documentation": "https://www.home-assistant.io/integrations/ecobee", "homekit": { "models": ["EB", "ecobee*"] diff --git a/homeassistant/components/ecobee/notify.py b/homeassistant/components/ecobee/notify.py index b2f6ccb05c8..787130c403f 100644 --- a/homeassistant/components/ecobee/notify.py +++ b/homeassistant/components/ecobee/notify.py @@ -2,11 +2,23 @@ from __future__ import annotations -from homeassistant.components.notify import ATTR_TARGET, BaseNotificationService +from functools import partial +from typing import Any + +from homeassistant.components.notify import ( + ATTR_TARGET, + BaseNotificationService, + NotifyEntity, +) +from homeassistant.config_entries import ConfigEntry from homeassistant.core import 
HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . import Ecobee, EcobeeData from .const import DOMAIN +from .entity import EcobeeBaseEntity +from .repairs import migrate_notify_issue def get_service( @@ -18,18 +30,25 @@ def get_service( if discovery_info is None: return None - data = hass.data[DOMAIN] + data: EcobeeData = hass.data[DOMAIN] return EcobeeNotificationService(data.ecobee) class EcobeeNotificationService(BaseNotificationService): """Implement the notification service for the Ecobee thermostat.""" - def __init__(self, ecobee): + def __init__(self, ecobee: Ecobee) -> None: """Initialize the service.""" self.ecobee = ecobee - def send_message(self, message="", **kwargs): + async def async_send_message(self, message: str = "", **kwargs: Any) -> None: + """Send a message and raise issue.""" + migrate_notify_issue(self.hass) + await self.hass.async_add_executor_job( + partial(self.send_message, message, **kwargs) + ) + + def send_message(self, message: str = "", **kwargs: Any) -> None: """Send a message.""" targets = kwargs.get(ATTR_TARGET) @@ -39,3 +58,33 @@ class EcobeeNotificationService(BaseNotificationService): for target in targets: thermostat_index = int(target) self.ecobee.send_message(thermostat_index, message) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ecobee thermostat.""" + data: EcobeeData = hass.data[DOMAIN] + async_add_entities( + EcobeeNotifyEntity(data, index) for index in range(len(data.ecobee.thermostats)) + ) + + +class EcobeeNotifyEntity(EcobeeBaseEntity, NotifyEntity): + """Implement the notification entity for the Ecobee thermostat.""" + + _attr_name = None + _attr_has_entity_name = True + + def __init__(self, data: EcobeeData, thermostat_index: int) -> None: + """Initialize the thermostat.""" + 
super().__init__(data, thermostat_index) + self._attr_unique_id = ( + f"{self.thermostat["identifier"]}_notify_{thermostat_index}" + ) + + def send_message(self, message: str) -> None: + """Send a message.""" + self.data.ecobee.send_message(self.thermostat_index, message) diff --git a/homeassistant/components/ecobee/repairs.py b/homeassistant/components/ecobee/repairs.py new file mode 100644 index 00000000000..66474730b2f --- /dev/null +++ b/homeassistant/components/ecobee/repairs.py @@ -0,0 +1,37 @@ +"""Repairs support for Ecobee.""" + +from __future__ import annotations + +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs import RepairsFlow +from homeassistant.components.repairs.issue_handler import ConfirmRepairFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir + +from .const import DOMAIN + + +@callback +def migrate_notify_issue(hass: HomeAssistant) -> None: + """Ensure an issue is registered.""" + ir.async_create_issue( + hass, + DOMAIN, + "migrate_notify", + breaks_in_ha_version="2024.11.0", + issue_domain=NOTIFY_DOMAIN, + is_fixable=True, + is_persistent=True, + translation_key="migrate_notify", + severity=ir.IssueSeverity.WARNING, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert issue_id == "migrate_notify" + return ConfirmRepairFlow() diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index b1d1df65417..1d64b6d6b94 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -163,5 +163,18 @@ } } } + }, + "issues": { + "migrate_notify": { + "title": "Migration of Ecobee notify service", + "fix_flow": { + "step": { + "confirm": { + "description": "The Ecobee `notify` service has been migrated. 
A new `notify` entity per Thermostat is available now.\n\nUpdate any automations to use the new `notify.send_message` exposed by these new entities. When this is done, fix this issue and restart Home Assistant.", + "title": "Disable legacy Ecobee notify service" + } + } + } + } } } diff --git a/homeassistant/components/ecovacs/config_flow.py b/homeassistant/components/ecovacs/config_flow.py index a1ea19144b0..4a421113f5f 100644 --- a/homeassistant/components/ecovacs/config_flow.py +++ b/homeassistant/components/ecovacs/config_flow.py @@ -71,7 +71,7 @@ async def _validate_input( if errors: return errors - device_id = get_client_device_id() + device_id = get_client_device_id(hass, rest_url is not None) country = user_input[CONF_COUNTRY] rest_config = create_rest_config( aiohttp_client.async_get_clientsession(hass), diff --git a/homeassistant/components/ecovacs/const.py b/homeassistant/components/ecovacs/const.py index e5ef0760182..6b77404e935 100644 --- a/homeassistant/components/ecovacs/const.py +++ b/homeassistant/components/ecovacs/const.py @@ -12,8 +12,10 @@ CONF_OVERRIDE_MQTT_URL = "override_mqtt_url" CONF_VERIFY_MQTT_CERTIFICATE = "verify_mqtt_certificate" SUPPORTED_LIFESPANS = ( + LifeSpan.BLADE, LifeSpan.BRUSH, LifeSpan.FILTER, + LifeSpan.LENS_BRUSH, LifeSpan.SIDE_BRUSH, ) diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 5defcdf861f..6b6fe3128dd 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -43,7 +43,8 @@ class EcovacsController: self._hass = hass self._devices: list[Device] = [] self.legacy_devices: list[VacBot] = [] - self._device_id = get_client_device_id() + rest_url = config.get(CONF_OVERRIDE_REST_URL) + self._device_id = get_client_device_id(hass, rest_url is not None) country = config[CONF_COUNTRY] self._continent = get_continent(country) @@ -52,7 +53,7 @@ class EcovacsController: 
aiohttp_client.async_get_clientsession(self._hass), device_id=self._device_id, alpha_2_country=country, - override_rest_url=config.get(CONF_OVERRIDE_REST_URL), + override_rest_url=rest_url, ), config[CONF_USERNAME], md5(config[CONF_PASSWORD]), diff --git a/homeassistant/components/ecovacs/event.py b/homeassistant/components/ecovacs/event.py index daac4a626ae..fb4c25c7559 100644 --- a/homeassistant/components/ecovacs/event.py +++ b/homeassistant/components/ecovacs/event.py @@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .controller import EcovacsController from .entity import EcovacsEntity +from .util import get_name_key async def async_setup_entry( @@ -54,10 +55,7 @@ class EcovacsLastJobEventEntity( # we trigger only on job done return - event_type = event.status.name.lower() - if event.status == CleanJobStatus.MANUAL_STOPPED: - event_type = "manually_stopped" - + event_type = get_name_key(event.status) self._trigger_event(event_type) self.async_write_ha_state() diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 2e2d897c455..44c577104dd 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -12,12 +12,18 @@ "relocate": { "default": "mdi:map-marker-question" }, + "reset_lifespan_blade": { + "default": "mdi:saw-blade" + }, "reset_lifespan_brush": { "default": "mdi:broom" }, "reset_lifespan_filter": { "default": "mdi:air-filter" }, + "reset_lifespan_lens_brush": { + "default": "mdi:broom" + }, "reset_lifespan_side_brush": { "default": "mdi:broom" } @@ -42,12 +48,18 @@ "error": { "default": "mdi:alert-circle" }, + "lifespan_blade": { + "default": "mdi:saw-blade" + }, "lifespan_brush": { "default": "mdi:broom" }, "lifespan_filter": { "default": "mdi:air-filter" }, + "lifespan_lens_brush": { + "default": "mdi:broom" + }, "lifespan_side_brush": { "default": "mdi:broom" }, diff --git 
a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 52753e6eb39..aad04d9ec87 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.9", "deebot-client==6.0.2"] + "requirements": ["py-sucks==0.9.9", "deebot-client==7.1.0"] } diff --git a/homeassistant/components/ecovacs/select.py b/homeassistant/components/ecovacs/select.py index 8a3def54e28..01d4c5aae6b 100644 --- a/homeassistant/components/ecovacs/select.py +++ b/homeassistant/components/ecovacs/select.py @@ -22,7 +22,7 @@ from .entity import ( EcovacsDescriptionEntity, EventT, ) -from .util import get_supported_entitites +from .util import get_name_key, get_supported_entitites @dataclass(kw_only=True, frozen=True) @@ -41,8 +41,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = ( EcovacsSelectEntityDescription[WaterInfoEvent]( device_capabilities=VacuumCapabilities, capability_fn=lambda caps: caps.water, - current_option_fn=lambda e: e.amount.display_name, - options_fn=lambda water: [amount.display_name for amount in water.types], + current_option_fn=lambda e: get_name_key(e.amount), + options_fn=lambda water: [get_name_key(amount) for amount in water.types], key="water_amount", translation_key="water_amount", entity_category=EntityCategory.CONFIG, @@ -50,8 +50,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] 
= ( EcovacsSelectEntityDescription[WorkModeEvent]( device_capabilities=VacuumCapabilities, capability_fn=lambda caps: caps.clean.work_mode, - current_option_fn=lambda e: e.mode.display_name, - options_fn=lambda cap: [mode.display_name for mode in cap.types], + current_option_fn=lambda e: get_name_key(e.mode), + options_fn=lambda cap: [get_name_key(mode) for mode in cap.types], key="work_mode", translation_key="work_mode", entity_registry_enabled_default=False, diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index 50afd21deb3..bb27bd6941d 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -46,12 +46,18 @@ "relocate": { "name": "Relocate" }, + "reset_lifespan_blade": { + "name": "Reset blade lifespan" + }, "reset_lifespan_brush": { "name": "Reset main brush lifespan" }, "reset_lifespan_filter": { "name": "Reset filter lifespan" }, + "reset_lifespan_lens_brush": { + "name": "Reset lens brush lifespan" + }, "reset_lifespan_side_brush": { "name": "Reset side brushes lifespan" } @@ -92,12 +98,18 @@ } } }, + "lifespan_blade": { + "name": "Blade lifespan" + }, "lifespan_brush": { "name": "Main brush lifespan" }, "lifespan_filter": { "name": "Filter lifespan" }, + "lifespan_lens_brush": { + "name": "Lens brush lifespan" + }, "lifespan_side_brush": { "name": "Side brushes lifespan" }, diff --git a/homeassistant/components/ecovacs/util.py b/homeassistant/components/ecovacs/util.py index 14e69cd4b61..9d692bbbb8f 100644 --- a/homeassistant/components/ecovacs/util.py +++ b/homeassistant/components/ecovacs/util.py @@ -2,12 +2,16 @@ from __future__ import annotations +from enum import Enum import random import string from typing import TYPE_CHECKING from deebot_client.capabilities import Capabilities +from homeassistant.core import HomeAssistant, callback +from homeassistant.util import slugify + from .entity import ( EcovacsCapabilityEntityDescription, 
EcovacsDescriptionEntity, @@ -18,8 +22,11 @@ if TYPE_CHECKING: from .controller import EcovacsController -def get_client_device_id() -> str: +def get_client_device_id(hass: HomeAssistant, self_hosted: bool) -> str: """Get client device id.""" + if self_hosted: + return f"HA-{slugify(hass.config.location_name)}" + return "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) @@ -38,3 +45,9 @@ def get_supported_entitites( if isinstance(device.capabilities, description.device_capabilities) if (capability := description.capability_fn(device.capabilities)) ] + + +@callback +def get_name_key(enum: Enum) -> str: + """Return the lower case name of the enum.""" + return enum.name.lower() diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index d5016ab683d..0e990645d7c 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -33,6 +33,7 @@ from homeassistant.util import slugify from .const import DOMAIN from .controller import EcovacsController from .entity import EcovacsEntity +from .util import get_name_key _LOGGER = logging.getLogger(__name__) @@ -242,7 +243,7 @@ class EcovacsVacuum( self._rooms: list[Room] = [] self._attr_fan_speed_list = [ - level.display_name for level in capabilities.fan_speed.types + get_name_key(level) for level in capabilities.fan_speed.types ] async def async_added_to_hass(self) -> None: @@ -254,7 +255,7 @@ class EcovacsVacuum( self.async_write_ha_state() async def on_fan_speed(event: FanSpeedEvent) -> None: - self._attr_fan_speed = event.speed.display_name + self._attr_fan_speed = get_name_key(event.speed) self.async_write_ha_state() async def on_rooms(event: RoomsEvent) -> None: diff --git a/homeassistant/components/emoncms_history/__init__.py b/homeassistant/components/emoncms_history/__init__.py index ab3f2671b99..7de3a4f2ef8 100644 --- a/homeassistant/components/emoncms_history/__init__.py +++ 
b/homeassistant/components/emoncms_history/__init__.py @@ -86,8 +86,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: continue if payload_dict: - payload = "{%s}" % ",".join( - f"{key}:{val}" for key, val in payload_dict.items() + payload = "{{{}}}".format( + ",".join(f"{key}:{val}" for key, val in payload_dict.items()) ) send_data( diff --git a/homeassistant/components/energy/sensor.py b/homeassistant/components/energy/sensor.py index 37930e31af0..147d8f3e26a 100644 --- a/homeassistant/components/energy/sensor.py +++ b/homeassistant/components/energy/sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable +from collections.abc import Callable, Mapping import copy from dataclasses import dataclass import logging @@ -167,8 +167,7 @@ class SensorManager: if adapter.flow_type is None: self._process_sensor_data( adapter, - # Opting out of the type complexity because can't get it to work - energy_source, # type: ignore[arg-type] + energy_source, to_add, to_remove, ) @@ -177,8 +176,7 @@ class SensorManager: for flow in energy_source[adapter.flow_type]: # type: ignore[typeddict-item] self._process_sensor_data( adapter, - # Opting out of the type complexity because can't get it to work - flow, # type: ignore[arg-type] + flow, to_add, to_remove, ) @@ -189,7 +187,7 @@ class SensorManager: def _process_sensor_data( self, adapter: SourceAdapter, - config: dict, + config: Mapping[str, Any], to_add: list[EnergyCostSensor], to_remove: dict[tuple[str, str | None, str], EnergyCostSensor], ) -> None: @@ -241,7 +239,7 @@ class EnergyCostSensor(SensorEntity): def __init__( self, adapter: SourceAdapter, - config: dict, + config: Mapping[str, Any], ) -> None: """Initialize the sensor.""" super().__init__() @@ -456,7 +454,7 @@ class EnergyCostSensor(SensorEntity): await super().async_will_remove_from_hass() @callback - def update_config(self, config: dict) -> None: + def update_config(self, config: Mapping[str, 
Any]) -> None: """Update the config.""" self._config = config diff --git a/homeassistant/components/energy/websocket_api.py b/homeassistant/components/energy/websocket_api.py index 2dd45a8be4d..2b5b71d3e2f 100644 --- a/homeassistant/components/energy/websocket_api.py +++ b/homeassistant/components/energy/websocket_api.py @@ -31,7 +31,7 @@ from .data import ( EnergyPreferencesUpdate, async_get_manager, ) -from .types import EnergyPlatform, GetSolarForecastType +from .types import EnergyPlatform, GetSolarForecastType, SolarForecastType from .validate import async_validate EnergyWebSocketCommandHandler = Callable[ @@ -203,19 +203,18 @@ async def ws_solar_forecast( for source in manager.data["energy_sources"]: if ( source["type"] != "solar" - or source.get("config_entry_solar_forecast") is None + or (solar_forecast := source.get("config_entry_solar_forecast")) is None ): continue - # typing is not catching the above guard for config_entry_solar_forecast being none - for config_entry in source["config_entry_solar_forecast"]: # type: ignore[union-attr] - config_entries[config_entry] = None + for entry in solar_forecast: + config_entries[entry] = None if not config_entries: connection.send_result(msg["id"], {}) return - forecasts = {} + forecasts: dict[str, SolarForecastType] = {} forecast_platforms = await async_get_energy_platforms(hass) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index 2407f807eb7..322f909437a 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -46,6 +46,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + coordinator: EnphaseUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator.async_cancel_token_refresh() unload_ok = await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 13894d423d6..5f859d16142 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -89,6 +89,14 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by zeroconf discovery.""" + if _LOGGER.isEnabledFor(logging.DEBUG): + current_hosts = self._async_current_hosts() + _LOGGER.debug( + "Zeroconf ip %s processing %s, current hosts: %s", + discovery_info.ip_address.version, + discovery_info.host, + current_hosts, + ) if discovery_info.ip_address.version != 4: return self.async_abort(reason="not_ipv4_address") serial = discovery_info.properties["serialnum"] @@ -96,17 +104,27 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(serial) self.ip_address = discovery_info.host self._abort_if_unique_id_configured({CONF_HOST: self.ip_address}) + _LOGGER.debug( + "Zeroconf ip %s, fw %s, no existing entry with serial %s", + self.ip_address, + self.protovers, + serial, + ) for entry in self._async_current_entries(include_ignore=False): if ( entry.unique_id is None and CONF_HOST in entry.data and entry.data[CONF_HOST] == self.ip_address ): + _LOGGER.debug( + "Zeroconf update envoy with this ip and blank serial in unique_id", + ) title = f"{ENVOY} {serial}" if entry.title == ENVOY else ENVOY return self.async_update_reload_and_abort( entry, title=title, unique_id=serial, reason="already_configured" ) + _LOGGER.debug("Zeroconf ip %s to step user", self.ip_address) return await self.async_step_user() async def async_step_reauth( diff --git a/homeassistant/components/enphase_envoy/coordinator.py 
b/homeassistant/components/enphase_envoy/coordinator.py index a508d5127d6..04f93098ad9 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -83,9 +83,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): def _async_mark_setup_complete(self) -> None: """Mark setup as complete and setup token refresh if needed.""" self._setup_complete = True - if self._cancel_token_refresh: - self._cancel_token_refresh() - self._cancel_token_refresh = None + self.async_cancel_token_refresh() if not isinstance(self.envoy.auth, EnvoyTokenAuth): return self._cancel_token_refresh = async_track_time_interval( @@ -159,3 +157,10 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): return envoy_data.raw raise RuntimeError("Unreachable code in _async_update_data") # pragma: no cover + + @callback + def async_cancel_token_refresh(self) -> None: + """Cancel token refresh.""" + if self._cancel_token_refresh: + self._cancel_token_refresh() + self._cancel_token_refresh = None diff --git a/homeassistant/components/epic_games_store/__init__.py b/homeassistant/components/epic_games_store/__init__.py new file mode 100644 index 00000000000..af25eb98137 --- /dev/null +++ b/homeassistant/components/epic_games_store/__init__.py @@ -0,0 +1,35 @@ +"""The Epic Games Store integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import EGSCalendarUpdateCoordinator + +PLATFORMS: list[Platform] = [ + Platform.CALENDAR, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Epic Games Store from a config entry.""" + + coordinator = EGSCalendarUpdateCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, 
{})[entry.entry_id] = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + hass.data[DOMAIN].pop(entry.entry_id) + + return unload_ok diff --git a/homeassistant/components/epic_games_store/calendar.py b/homeassistant/components/epic_games_store/calendar.py new file mode 100644 index 00000000000..75c448e8467 --- /dev/null +++ b/homeassistant/components/epic_games_store/calendar.py @@ -0,0 +1,97 @@ +"""Calendar platform for a Epic Games Store.""" + +from __future__ import annotations + +from collections import namedtuple +from datetime import datetime +from typing import Any + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, CalendarType +from .coordinator import EGSCalendarUpdateCoordinator + +DateRange = namedtuple("DateRange", ["start", "end"]) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the local calendar platform.""" + coordinator: EGSCalendarUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + entities = [ + EGSCalendar(coordinator, entry.entry_id, CalendarType.FREE), + EGSCalendar(coordinator, entry.entry_id, CalendarType.DISCOUNT), + ] + async_add_entities(entities) + + +class EGSCalendar(CoordinatorEntity[EGSCalendarUpdateCoordinator], CalendarEntity): + """A calendar entity by Epic Games Store.""" + + _attr_has_entity_name = True + 
+ def __init__( + self, + coordinator: EGSCalendarUpdateCoordinator, + config_entry_id: str, + cal_type: CalendarType, + ) -> None: + """Initialize EGSCalendar.""" + super().__init__(coordinator) + self._cal_type = cal_type + self._attr_translation_key = f"{cal_type}_games" + self._attr_unique_id = f"{config_entry_id}-{cal_type}" + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, config_entry_id)}, + manufacturer="Epic Games Store", + name="Epic Games Store", + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + if event := self.coordinator.data[self._cal_type]: + return _get_calendar_event(event[0]) + return None + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Get all events in a specific time frame.""" + events = filter( + lambda game: _are_date_range_overlapping( + DateRange(start=game["discount_start_at"], end=game["discount_end_at"]), + DateRange(start=start_date, end=end_date), + ), + self.coordinator.data[self._cal_type], + ) + return [_get_calendar_event(event) for event in events] + + +def _get_calendar_event(event: dict[str, Any]) -> CalendarEvent: + """Return a CalendarEvent from an API event.""" + return CalendarEvent( + summary=event["title"], + start=event["discount_start_at"], + end=event["discount_end_at"], + description=f"{event['description']}\n\n{event['url']}", + ) + + +def _are_date_range_overlapping(range1: DateRange, range2: DateRange) -> bool: + """Return a CalendarEvent from an API event.""" + latest_start = max(range1.start, range2.start) + earliest_end = min(range1.end, range2.end) + delta = (earliest_end - latest_start).days + 1 + overlap = max(0, delta) + return overlap > 0 diff --git a/homeassistant/components/epic_games_store/config_flow.py b/homeassistant/components/epic_games_store/config_flow.py new file mode 100644 index 00000000000..2ae86060ba2 
--- /dev/null +++ b/homeassistant/components/epic_games_store/config_flow.py @@ -0,0 +1,96 @@ +"""Config flow for Epic Games Store integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from epicstore_api import EpicGamesStoreAPI +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.selector import ( + CountrySelector, + LanguageSelector, + LanguageSelectorConfig, +) + +from .const import DOMAIN, SUPPORTED_LANGUAGES + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_LANGUAGE): LanguageSelector( + LanguageSelectorConfig(languages=SUPPORTED_LANGUAGES) + ), + vol.Required(CONF_COUNTRY): CountrySelector(), + } +) + + +def get_default_language(hass: HomeAssistant) -> str | None: + """Get default language code based on Home Assistant config.""" + language_code = f"{hass.config.language}-{hass.config.country}" + if language_code in SUPPORTED_LANGUAGES: + return language_code + if hass.config.language in SUPPORTED_LANGUAGES: + return hass.config.language + return None + + +async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> None: + """Validate the user input allows us to connect.""" + api = EpicGamesStoreAPI(user_input[CONF_LANGUAGE], user_input[CONF_COUNTRY]) + data = await hass.async_add_executor_job(api.get_free_games) + + if data.get("errors"): + _LOGGER.warning(data["errors"]) + + assert data["data"]["Catalog"]["searchStore"]["elements"] + + +class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for Epic Games Store.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + data_schema = 
self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, + user_input + or { + CONF_LANGUAGE: get_default_language(self.hass), + CONF_COUNTRY: self.hass.config.country, + }, + ) + if user_input is None: + return self.async_show_form(step_id="user", data_schema=data_schema) + + await self.async_set_unique_id( + f"freegames-{user_input[CONF_LANGUAGE]}-{user_input[CONF_COUNTRY]}" + ) + self._abort_if_unique_id_configured() + + errors = {} + + try: + await validate_input(self.hass, user_input) + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=f"Epic Games Store - Free Games ({user_input[CONF_LANGUAGE]}-{user_input[CONF_COUNTRY]})", + data=user_input, + ) + + return self.async_show_form( + step_id="user", data_schema=data_schema, errors=errors + ) diff --git a/homeassistant/components/epic_games_store/const.py b/homeassistant/components/epic_games_store/const.py new file mode 100644 index 00000000000..c397698fd0c --- /dev/null +++ b/homeassistant/components/epic_games_store/const.py @@ -0,0 +1,31 @@ +"""Constants for the Epic Games Store integration.""" + +from enum import StrEnum + +DOMAIN = "epic_games_store" + +SUPPORTED_LANGUAGES = [ + "ar", + "de", + "en-US", + "es-ES", + "es-MX", + "fr", + "it", + "ja", + "ko", + "pl", + "pt-BR", + "ru", + "th", + "tr", + "zh-CN", + "zh-Hant", +] + + +class CalendarType(StrEnum): + """Calendar types.""" + + FREE = "free" + DISCOUNT = "discount" diff --git a/homeassistant/components/epic_games_store/coordinator.py b/homeassistant/components/epic_games_store/coordinator.py new file mode 100644 index 00000000000..d9c48f5da02 --- /dev/null +++ b/homeassistant/components/epic_games_store/coordinator.py @@ -0,0 +1,81 @@ +"""The Epic Games Store integration data coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Any + +from epicstore_api 
import EpicGamesStoreAPI + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, CalendarType +from .helper import format_game_data + +SCAN_INTERVAL = timedelta(days=1) + +_LOGGER = logging.getLogger(__name__) + + +class EGSCalendarUpdateCoordinator( + DataUpdateCoordinator[dict[str, list[dict[str, Any]]]] +): + """Class to manage fetching data from the Epic Game Store.""" + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize.""" + self._api = EpicGamesStoreAPI( + entry.data[CONF_LANGUAGE], + entry.data[CONF_COUNTRY], + ) + self.language = entry.data[CONF_LANGUAGE] + + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + + async def _async_update_data(self) -> dict[str, list[dict[str, Any]]]: + """Update data via library.""" + raw_data = await self.hass.async_add_executor_job(self._api.get_free_games) + _LOGGER.debug(raw_data) + data = raw_data["data"]["Catalog"]["searchStore"]["elements"] + + discount_games = filter( + lambda game: game.get("promotions") + and ( + # Current discount(s) + game["promotions"]["promotionalOffers"] + or + # Upcoming discount(s) + game["promotions"]["upcomingPromotionalOffers"] + ), + data, + ) + + return_data: dict[str, list[dict[str, Any]]] = { + CalendarType.DISCOUNT: [], + CalendarType.FREE: [], + } + for discount_game in discount_games: + game = format_game_data(discount_game, self.language) + + if game["discount_type"]: + return_data[game["discount_type"]].append(game) + + return_data[CalendarType.DISCOUNT] = sorted( + return_data[CalendarType.DISCOUNT], + key=lambda game: game["discount_start_at"], + ) + return_data[CalendarType.FREE] = sorted( + return_data[CalendarType.FREE], key=lambda game: game["discount_start_at"] + ) + + 
_LOGGER.debug(return_data) + return return_data diff --git a/homeassistant/components/epic_games_store/helper.py b/homeassistant/components/epic_games_store/helper.py new file mode 100644 index 00000000000..2510c7699e5 --- /dev/null +++ b/homeassistant/components/epic_games_store/helper.py @@ -0,0 +1,92 @@ +"""Helper for Epic Games Store.""" + +import contextlib +from typing import Any + +from homeassistant.util import dt as dt_util + + +def format_game_data(raw_game_data: dict[str, Any], language: str) -> dict[str, Any]: + """Format raw API game data for Home Assistant users.""" + img_portrait = None + img_landscape = None + + for image in raw_game_data["keyImages"]: + if image["type"] == "OfferImageTall": + img_portrait = image["url"] + if image["type"] == "OfferImageWide": + img_landscape = image["url"] + + current_promotions = raw_game_data["promotions"]["promotionalOffers"] + upcoming_promotions = raw_game_data["promotions"]["upcomingPromotionalOffers"] + + promotion_data = {} + if ( + current_promotions + and raw_game_data["price"]["totalPrice"]["discountPrice"] == 0 + ): + promotion_data = current_promotions[0]["promotionalOffers"][0] + else: + promotion_data = (current_promotions or upcoming_promotions)[0][ + "promotionalOffers" + ][0] + + return { + "title": raw_game_data["title"].replace("\xa0", " "), + "description": raw_game_data["description"].strip().replace("\xa0", " "), + "released_at": dt_util.parse_datetime(raw_game_data["effectiveDate"]), + "original_price": raw_game_data["price"]["totalPrice"]["fmtPrice"][ + "originalPrice" + ].replace("\xa0", " "), + "publisher": raw_game_data["seller"]["name"], + "url": get_game_url(raw_game_data, language), + "img_portrait": img_portrait, + "img_landscape": img_landscape, + "discount_type": ("free" if is_free_game(raw_game_data) else "discount") + if promotion_data + else None, + "discount_start_at": dt_util.parse_datetime(promotion_data["startDate"]) + if promotion_data + else None, + "discount_end_at": 
dt_util.parse_datetime(promotion_data["endDate"]) + if promotion_data + else None, + } + + +def get_game_url(raw_game_data: dict[str, Any], language: str) -> str: + """Format raw API game data for Home Assistant users.""" + url_bundle_or_product = "bundles" if raw_game_data["offerType"] == "BUNDLE" else "p" + url_slug: str | None = None + try: + url_slug = raw_game_data["offerMappings"][0]["pageSlug"] + except Exception: # pylint: disable=broad-except + with contextlib.suppress(Exception): + url_slug = raw_game_data["catalogNs"]["mappings"][0]["pageSlug"] + + if not url_slug: + url_slug = raw_game_data["urlSlug"] + + return f"https://store.epicgames.com/{language}/{url_bundle_or_product}/{url_slug}" + + +def is_free_game(game: dict[str, Any]) -> bool: + """Return if the game is free or will be free.""" + return ( + # Current free game(s) + game["promotions"]["promotionalOffers"] + and game["promotions"]["promotionalOffers"][0]["promotionalOffers"][0][ + "discountSetting" + ]["discountPercentage"] + == 0 + and + # Checking current price, maybe not necessary + game["price"]["totalPrice"]["discountPrice"] == 0 + ) or ( + # Upcoming free game(s) + game["promotions"]["upcomingPromotionalOffers"] + and game["promotions"]["upcomingPromotionalOffers"][0]["promotionalOffers"][0][ + "discountSetting" + ]["discountPercentage"] + == 0 + ) diff --git a/homeassistant/components/epic_games_store/manifest.json b/homeassistant/components/epic_games_store/manifest.json new file mode 100644 index 00000000000..665eaec6668 --- /dev/null +++ b/homeassistant/components/epic_games_store/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "epic_games_store", + "name": "Epic Games Store", + "codeowners": ["@hacf-fr", "@Quentame"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/epic_games_store", + "integration_type": "service", + "iot_class": "cloud_polling", + "requirements": ["epicstore-api==0.1.7"] +} diff --git 
a/homeassistant/components/epic_games_store/strings.json b/homeassistant/components/epic_games_store/strings.json new file mode 100644 index 00000000000..58a87a55f81 --- /dev/null +++ b/homeassistant/components/epic_games_store/strings.json @@ -0,0 +1,38 @@ +{ + "config": { + "step": { + "user": { + "data": { + "language": "Language", + "country": "Country" + } + } + }, + "error": { + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + }, + "entity": { + "calendar": { + "free_games": { + "name": "Free games", + "state_attributes": { + "games": { + "name": "Games" + } + } + }, + "discount_games": { + "name": "Discount games", + "state_attributes": { + "games": { + "name": "[%key:component::epic_games_store::entity::calendar::free_games::state_attributes::games::name%]" + } + } + } + } + } +} diff --git a/homeassistant/components/esphome/datetime.py b/homeassistant/components/esphome/datetime.py new file mode 100644 index 00000000000..15509a46158 --- /dev/null +++ b/homeassistant/components/esphome/datetime.py @@ -0,0 +1,48 @@ +"""Support for esphome datetimes.""" + +from __future__ import annotations + +from datetime import datetime + +from aioesphomeapi import DateTimeInfo, DateTimeState + +from homeassistant.components.datetime import DateTimeEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.util.dt as dt_util + +from .entity import EsphomeEntity, esphome_state_property, platform_async_setup_entry + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up esphome datetimes based on a config entry.""" + await platform_async_setup_entry( + hass, + entry, + async_add_entities, + info_type=DateTimeInfo, + 
entity_type=EsphomeDateTime, + state_type=DateTimeState, + ) + + +class EsphomeDateTime(EsphomeEntity[DateTimeInfo, DateTimeState], DateTimeEntity): + """A datetime implementation for esphome.""" + + @property + @esphome_state_property + def native_value(self) -> datetime | None: + """Return the state of the entity.""" + state = self._state + if state.missing_state: + return None + return dt_util.utc_from_timestamp(state.epoch_seconds) + + async def async_set_value(self, value: datetime) -> None: + """Update the current datetime.""" + self._client.datetime_command(self._key, int(value.timestamp())) diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index 52dc1f17ad6..41b18c9b88c 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -20,9 +20,12 @@ from aioesphomeapi import ( ClimateInfo, CoverInfo, DateInfo, + DateTimeInfo, DeviceInfo, EntityInfo, EntityState, + Event, + EventInfo, FanInfo, LightInfo, LockInfo, @@ -46,9 +49,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.storage import Store -from homeassistant.util.signal_type import SignalType from .const import DOMAIN from .dashboard import async_get_dashboard @@ -68,6 +69,8 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = { ClimateInfo: Platform.CLIMATE, CoverInfo: Platform.COVER, DateInfo: Platform.DATE, + DateTimeInfo: Platform.DATETIME, + EventInfo: Platform.EVENT, FanInfo: Platform.FAN, LightInfo: Platform.LIGHT, LockInfo: Platform.LOCK, @@ -121,6 +124,9 @@ class RuntimeEntryData: default_factory=dict ) device_update_subscriptions: set[CALLBACK_TYPE] = field(default_factory=set) + 
static_info_update_subscriptions: set[Callable[[list[EntityInfo]], None]] = field( + default_factory=set + ) loaded_platforms: set[Platform] = field(default_factory=set) platform_load_lock: asyncio.Lock = field(default_factory=asyncio.Lock) _storage_contents: StoreData | None = None @@ -149,11 +155,6 @@ class RuntimeEntryData: "_", " " ) - @property - def signal_static_info_updated(self) -> SignalType[list[EntityInfo]]: - """Return the signal to listen to for updates on static info.""" - return SignalType(f"esphome_{self.entry_id}_on_list") - @callback def async_register_static_info_callback( self, @@ -298,8 +299,9 @@ class RuntimeEntryData: for callback_ in callbacks_: callback_(entity_infos) - # Then send dispatcher event - async_dispatcher_send(hass, self.signal_static_info_updated, infos) + # Finally update static info subscriptions + for callback_ in self.static_info_update_subscriptions: + callback_(infos) @callback def async_subscribe_device_updated(self, callback_: CALLBACK_TYPE) -> CALLBACK_TYPE: @@ -312,6 +314,21 @@ class RuntimeEntryData: """Unsubscribe to device updates.""" self.device_update_subscriptions.remove(callback_) + @callback + def async_subscribe_static_info_updated( + self, callback_: Callable[[list[EntityInfo]], None] + ) -> CALLBACK_TYPE: + """Subscribe to static info updates.""" + self.static_info_update_subscriptions.add(callback_) + return partial(self._async_unsubscribe_static_info_updated, callback_) + + @callback + def _async_unsubscribe_static_info_updated( + self, callback_: Callable[[list[EntityInfo]], None] + ) -> None: + """Unsubscribe to static info updates.""" + self.static_info_update_subscriptions.remove(callback_) + @callback def async_subscribe_state_update( self, @@ -343,7 +360,7 @@ class RuntimeEntryData: if ( current_state == state and subscription_key not in stale_state - and state_type is not CameraState + and state_type not in (CameraState, Event) and not ( state_type is SensorState and (platform_info := 
self.info.get(SensorInfo)) diff --git a/homeassistant/components/esphome/event.py b/homeassistant/components/esphome/event.py new file mode 100644 index 00000000000..3c7331beba0 --- /dev/null +++ b/homeassistant/components/esphome/event.py @@ -0,0 +1,48 @@ +"""Support for ESPHome event components.""" + +from __future__ import annotations + +from aioesphomeapi import EntityInfo, Event, EventInfo + +from homeassistant.components.event import EventDeviceClass, EventEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.enum import try_parse_enum + +from .entity import EsphomeEntity, platform_async_setup_entry + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up ESPHome event based on a config entry.""" + await platform_async_setup_entry( + hass, + entry, + async_add_entities, + info_type=EventInfo, + entity_type=EsphomeEvent, + state_type=Event, + ) + + +class EsphomeEvent(EsphomeEntity[EventInfo, Event], EventEntity): + """An event implementation for ESPHome.""" + + @callback + def _on_static_info_update(self, static_info: EntityInfo) -> None: + """Set attrs from static info.""" + super()._on_static_info_update(static_info) + static_info = self._static_info + if event_types := static_info.event_types: + self._attr_event_types = event_types + self._attr_device_class = try_parse_enum( + EventDeviceClass, static_info.device_class + ) + + @callback + def _on_state_update(self) -> None: + self._update_state_from_entry_data() + self._trigger_event(self._state.event_type) + self.async_write_ha_state() diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index e700dddbb96..cde44fa3231 100644 --- a/homeassistant/components/esphome/manifest.json +++ 
b/homeassistant/components/esphome/manifest.json @@ -15,7 +15,7 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "requirements": [ - "aioesphomeapi==24.1.0", + "aioesphomeapi==24.3.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index 3e5a82bbd0b..b16a6e798b7 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -17,7 +17,6 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -149,14 +148,9 @@ class ESPHomeUpdateEntity(CoordinatorEntity[ESPHomeDashboard], UpdateEntity): async def async_added_to_hass(self) -> None: """Handle entity added to Home Assistant.""" await super().async_added_to_hass() - hass = self.hass entry_data = self._entry_data self.async_on_remove( - async_dispatcher_connect( - hass, - entry_data.signal_static_info_updated, - self._handle_device_update, - ) + entry_data.async_subscribe_static_info_updated(self._handle_device_update) ) self.async_on_remove( entry_data.async_subscribe_device_updated(self._handle_device_update) diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 3017685a307..4564e863e42 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -19,7 +19,10 @@ from evohomeasync2.schema.const import ( SZ_ALLOWED_SYSTEM_MODES, SZ_AUTO_WITH_RESET, SZ_CAN_BE_TEMPORARY, + SZ_GATEWAY_ID, + SZ_GATEWAY_INFO, 
SZ_HEAT_SETPOINT, + SZ_LOCATION_ID, SZ_LOCATION_INFO, SZ_SETPOINT_STATUS, SZ_STATE_STATUS, @@ -30,7 +33,7 @@ from evohomeasync2.schema.const import ( SZ_TIMING_MODE, SZ_UNTIL, ) -import voluptuous as vol # type: ignore[import-untyped] +import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, @@ -261,14 +264,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return False if _LOGGER.isEnabledFor(logging.DEBUG): - _config: dict[str, Any] = { - SZ_LOCATION_INFO: {SZ_TIME_ZONE: None}, - GWS: [{TCS: None}], + loc_info = { + SZ_LOCATION_ID: loc_config[SZ_LOCATION_INFO][SZ_LOCATION_ID], + SZ_TIME_ZONE: loc_config[SZ_LOCATION_INFO][SZ_TIME_ZONE], + } + gwy_info = { + SZ_GATEWAY_ID: loc_config[GWS][0][SZ_GATEWAY_INFO][SZ_GATEWAY_ID], + TCS: loc_config[GWS][0][TCS], + } + _config = { + SZ_LOCATION_INFO: loc_info, + GWS: [{SZ_GATEWAY_INFO: gwy_info, TCS: loc_config[GWS][0][TCS]}], } - _config[SZ_LOCATION_INFO][SZ_TIME_ZONE] = loc_config[SZ_LOCATION_INFO][ - SZ_TIME_ZONE - ] - _config[GWS][0][TCS] = loc_config[GWS][0][TCS] _LOGGER.debug("Config = %s", _config) client_v1 = ev1.EvohomeClient( @@ -455,7 +462,7 @@ class EvoBroker: self.client.access_token_expires # type: ignore[arg-type] ) - app_storage = { + app_storage: dict[str, Any] = { CONF_USERNAME: self.client.username, REFRESH_TOKEN: self.client.refresh_token, ACCESS_TOKEN: self.client.access_token, @@ -463,11 +470,11 @@ class EvoBroker: } if self.client_v1: - app_storage[USER_DATA] = { # type: ignore[assignment] + app_storage[USER_DATA] = { SZ_SESSION_ID: self.client_v1.broker.session_id, } # this is the schema for STORAGE_VER == 1 else: - app_storage[USER_DATA] = {} # type: ignore[assignment] + app_storage[USER_DATA] = {} await self._store.async_save(app_storage) diff --git a/homeassistant/components/feedreader/__init__.py b/homeassistant/components/feedreader/__init__.py index 0a16e986d0b..2b0c6b77559 100644 --- a/homeassistant/components/feedreader/__init__.py +++ 
b/homeassistant/components/feedreader/__init__.py @@ -117,7 +117,7 @@ class FeedManager: def _update(self) -> struct_time | None: """Update the feed and publish new entries to the event bus.""" _LOGGER.debug("Fetching new data from feed %s", self._url) - self._feed: feedparser.FeedParserDict = feedparser.parse( # type: ignore[no-redef] + self._feed = feedparser.parse( self._url, etag=None if not self._feed else self._feed.get("etag"), modified=None if not self._feed else self._feed.get("modified"), diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 2c1405130b4..5b7908ddf08 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -108,26 +108,21 @@ class FibaroController: # Device infos by fibaro device id self._device_infos: dict[int, DeviceInfo] = {} - def connect(self) -> bool: + def connect(self) -> None: """Start the communication with the Fibaro controller.""" - connected = self._client.connect() + # Return value doesn't need to be checked, + # it is only relevant when connecting without credentials + self._client.connect() info = self._client.read_info() self.hub_serial = info.serial_number self.hub_name = info.hc_name self.hub_model = info.platform self.hub_software_version = info.current_version - if connected is False: - _LOGGER.error( - "Invalid login for Fibaro HC. Please check username and password" - ) - return False - self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()} self._read_devices() self._scenes = self._client.read_scenes() - return True def connect_with_error_handling(self) -> None: """Translate connect errors to easily differentiate auth and connect failures. @@ -135,9 +130,7 @@ class FibaroController: When there is a better error handling in the used library this can be improved. 
""" try: - connected = self.connect() - if not connected: - raise FibaroConnectFailed("Connect status is false") + self.connect() except HTTPError as http_ex: if http_ex.response.status_code == 403: raise FibaroAuthFailed from http_ex @@ -382,7 +375,7 @@ class FibaroController: pass -def _init_controller(data: Mapping[str, Any]) -> FibaroController: +def init_controller(data: Mapping[str, Any]) -> FibaroController: """Validate the user input allows us to connect to fibaro.""" controller = FibaroController(data) controller.connect_with_error_handling() @@ -395,7 +388,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: The unique id of the config entry is the serial number of the home center. """ try: - controller = await hass.async_add_executor_job(_init_controller, entry.data) + controller = await hass.async_add_executor_job(init_controller, entry.data) except FibaroConnectFailed as connect_ex: raise ConfigEntryNotReady( f"Could not connect to controller at {entry.data[CONF_URL]}" @@ -454,37 +447,38 @@ class FibaroDevice(Entity): if not fibaro_device.visible: self._attr_entity_registry_visible_default = False - async def async_added_to_hass(self): + async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" self.controller.register(self.fibaro_device.fibaro_id, self._update_callback) - def _update_callback(self): + def _update_callback(self) -> None: """Update the state.""" self.schedule_update_ha_state(True) @property - def level(self): + def level(self) -> int | None: """Get the level of Fibaro device.""" if self.fibaro_device.value.has_value: return self.fibaro_device.value.int_value() return None @property - def level2(self): + def level2(self) -> int | None: """Get the tilt level of Fibaro device.""" if self.fibaro_device.value_2.has_value: return self.fibaro_device.value_2.int_value() return None - def dont_know_message(self, action): + def dont_know_message(self, cmd: str) -> None: """Make a warning 
in case we don't know how to perform an action.""" _LOGGER.warning( - "Not sure how to setValue: %s (available actions: %s)", + "Not sure how to %s: %s (available actions: %s)", + cmd, str(self.ha_id), str(self.fibaro_device.actions), ) - def set_level(self, level): + def set_level(self, level: int) -> None: """Set the level of Fibaro device.""" self.action("setValue", level) if self.fibaro_device.value.has_value: @@ -492,21 +486,21 @@ class FibaroDevice(Entity): if self.fibaro_device.has_brightness: self.fibaro_device.properties["brightness"] = level - def set_level2(self, level): + def set_level2(self, level: int) -> None: """Set the level2 of Fibaro device.""" self.action("setValue2", level) if self.fibaro_device.value_2.has_value: self.fibaro_device.properties["value2"] = level - def call_turn_on(self): + def call_turn_on(self) -> None: """Turn on the Fibaro device.""" self.action("turnOn") - def call_turn_off(self): + def call_turn_off(self) -> None: """Turn off the Fibaro device.""" self.action("turnOff") - def call_set_color(self, red, green, blue, white): + def call_set_color(self, red: int, green: int, blue: int, white: int) -> None: """Set the color of Fibaro device.""" red = int(max(0, min(255, red))) green = int(max(0, min(255, green))) @@ -516,7 +510,7 @@ class FibaroDevice(Entity): self.fibaro_device.properties["color"] = color_str self.action("setColor", str(red), str(green), str(blue), str(white)) - def action(self, cmd, *args): + def action(self, cmd: str, *args: Any) -> None: """Perform an action on the Fibaro HC.""" if cmd in self.fibaro_device.actions: self.fibaro_device.execute_action(cmd, args) @@ -525,12 +519,12 @@ class FibaroDevice(Entity): self.dont_know_message(cmd) @property - def current_binary_state(self): + def current_binary_state(self) -> bool: """Return the current binary state.""" return self.fibaro_device.value.bool_value(False) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> Mapping[str, Any]: 
"""Return the state attributes of the device.""" attr = {"fibaro_id": self.fibaro_device.fibaro_id} diff --git a/homeassistant/components/fibaro/binary_sensor.py b/homeassistant/components/fibaro/binary_sensor.py index c0980025555..3c965c11b34 100644 --- a/homeassistant/components/fibaro/binary_sensor.py +++ b/homeassistant/components/fibaro/binary_sensor.py @@ -76,9 +76,9 @@ class FibaroBinarySensor(FibaroDevice, BinarySensorEntity): self._attr_icon = SENSOR_TYPES[self._fibaro_sensor_type][1] @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: + def extra_state_attributes(self) -> Mapping[str, Any]: """Return the extra state attributes of the device.""" - return super().extra_state_attributes | self._own_extra_state_attributes + return {**super().extra_state_attributes, **self._own_extra_state_attributes} def update(self) -> None: """Get the latest data and update the state.""" diff --git a/homeassistant/components/fibaro/config_flow.py b/homeassistant/components/fibaro/config_flow.py index 8c2fb502488..9003704348d 100644 --- a/homeassistant/components/fibaro/config_flow.py +++ b/homeassistant/components/fibaro/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResu from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant -from . import FibaroAuthFailed, FibaroConnectFailed, FibaroController +from . 
import FibaroAuthFailed, FibaroConnectFailed, init_controller from .const import CONF_IMPORT_PLUGINS, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -28,19 +28,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema( ) -def _connect_to_fibaro(data: dict[str, Any]) -> FibaroController: - """Validate the user input allows us to connect to fibaro.""" - controller = FibaroController(data) - controller.connect_with_error_handling() - return controller - - async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ - controller = await hass.async_add_executor_job(_connect_to_fibaro, data) + controller = await hass.async_add_executor_job(init_controller, data) _LOGGER.debug( "Successfully connected to fibaro home center %s with name %s", diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index 16be6e98ae1..e71ae8982e7 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from pyfibaro.fibaro_device import DeviceModel @@ -80,11 +80,11 @@ class FibaroCover(FibaroDevice, CoverEntity): def set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self.set_level(kwargs.get(ATTR_POSITION)) + self.set_level(cast(int, kwargs.get(ATTR_POSITION))) def set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self.set_level2(kwargs.get(ATTR_TILT_POSITION)) + self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION))) @property def is_closed(self) -> bool | None: diff --git a/homeassistant/components/fibaro/manifest.json b/homeassistant/components/fibaro/manifest.json index bb1558f998b..39850672d06 100644 --- a/homeassistant/components/fibaro/manifest.json +++ 
b/homeassistant/components/fibaro/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyfibaro"], - "requirements": ["pyfibaro==0.7.7"] + "requirements": ["pyfibaro==0.7.8"] } diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index d111fe03c5c..3f0b9e8f6da 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import os -from typing import cast +from typing import Any, cast import voluptuous as vol from watchdog.events import ( @@ -19,17 +19,17 @@ from watchdog.events import ( ) from watchdog.observers import Observer +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType +from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN + _LOGGER = logging.getLogger(__name__) -CONF_FOLDER = "folder" -CONF_PATTERNS = "patterns" -DEFAULT_PATTERN = "*" -DOMAIN = "folder_watcher" CONFIG_SCHEMA = vol.Schema( { @@ -51,20 +51,62 @@ CONFIG_SCHEMA = vol.Schema( ) -def setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the folder watcher.""" - conf = config[DOMAIN] - for watcher in conf: - path: str = watcher[CONF_FOLDER] - patterns: list[str] = watcher[CONF_PATTERNS] - if not hass.config.is_allowed_path(path): - _LOGGER.error("Folder %s is not valid or allowed", path) - return False - Watcher(path, patterns, hass) + if DOMAIN in config: + conf: list[dict[str, Any]] = config[DOMAIN] + for watcher in conf: + 
path: str = watcher[CONF_FOLDER] + if not hass.config.is_allowed_path(path): + async_create_issue( + hass, + DOMAIN, + f"import_failed_not_allowed_path_{path}", + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.ERROR, + translation_key="import_failed_not_allowed_path", + translation_placeholders={ + "path": path, + "config_variable": "allowlist_external_dirs", + }, + ) + continue + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=watcher + ) + ) return True +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Folder watcher from a config entry.""" + + path: str = entry.options[CONF_FOLDER] + patterns: list[str] = entry.options[CONF_PATTERNS] + if not hass.config.is_allowed_path(path): + _LOGGER.error("Folder %s is not valid or allowed", path) + async_create_issue( + hass, + DOMAIN, + f"setup_not_allowed_path_{path}", + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.ERROR, + translation_key="setup_not_allowed_path", + translation_placeholders={ + "path": path, + "config_variable": "allowlist_external_dirs", + }, + learn_more_url="https://www.home-assistant.io/docs/configuration/basic/#allowlist_external_dirs", + ) + return False + await hass.async_add_executor_job(Watcher, path, patterns, hass) + return True + + def create_event_handler(patterns: list[str], hass: HomeAssistant) -> EventHandler: """Return the Watchdog EventHandler object.""" diff --git a/homeassistant/components/folder_watcher/config_flow.py b/homeassistant/components/folder_watcher/config_flow.py new file mode 100644 index 00000000000..50d198df3c3 --- /dev/null +++ b/homeassistant/components/folder_watcher/config_flow.py @@ -0,0 +1,116 @@ +"""Adds config flow for Folder watcher.""" + +from __future__ import annotations + +from collections.abc import Mapping +import os +from typing import Any + +import voluptuous as vol + +from 
homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.core import callback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowError, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const import CONF_FOLDER, CONF_PATTERNS, DEFAULT_PATTERN, DOMAIN + + +async def validate_setup( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Check path is a folder.""" + value: str = user_input[CONF_FOLDER] + dir_in = os.path.expanduser(str(value)) + handler.parent_handler._async_abort_entries_match({CONF_FOLDER: value}) # pylint: disable=protected-access + + if not os.path.isdir(dir_in): + raise SchemaFlowError("not_dir") + if not os.access(dir_in, os.R_OK): + raise SchemaFlowError("not_readable_dir") + if not handler.parent_handler.hass.config.is_allowed_path(value): + raise SchemaFlowError("not_allowed_dir") + + return user_input + + +async def validate_import_setup( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Create issue on successful import.""" + async_create_issue( + handler.parent_handler.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.11.0", + is_fixable=False, + is_persistent=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Folder Watcher", + }, + ) + return user_input + + +OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): SelectSelector( + SelectSelectorConfig( + options=[DEFAULT_PATTERN], + multiple=True, + 
custom_value=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + } +) +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_FOLDER): TextSelector(), + } +).extend(OPTIONS_SCHEMA.schema) + +CONFIG_FLOW = { + "user": SchemaFlowFormStep(schema=DATA_SCHEMA, validate_user_input=validate_setup), + "import": SchemaFlowFormStep( + schema=DATA_SCHEMA, validate_user_input=validate_import_setup + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep(schema=OPTIONS_SCHEMA), +} + + +class FolderWatcherConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Folder Watcher.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return f"Folder Watcher {options[CONF_FOLDER]}" + + @callback + def async_create_entry( + self, data: Mapping[str, Any], **kwargs: Any + ) -> ConfigFlowResult: + """Finish config flow and create a config entry.""" + self._async_abort_entries_match({CONF_FOLDER: data[CONF_FOLDER]}) + return super().async_create_entry(data, **kwargs) diff --git a/homeassistant/components/folder_watcher/const.py b/homeassistant/components/folder_watcher/const.py new file mode 100644 index 00000000000..22dae3b9164 --- /dev/null +++ b/homeassistant/components/folder_watcher/const.py @@ -0,0 +1,6 @@ +"""Constants for Folder watcher.""" + +CONF_FOLDER = "folder" +CONF_PATTERNS = "patterns" +DEFAULT_PATTERN = "*" +DOMAIN = "folder_watcher" diff --git a/homeassistant/components/folder_watcher/manifest.json b/homeassistant/components/folder_watcher/manifest.json index 96decd0b8cf..7b471e08fcc 100644 --- a/homeassistant/components/folder_watcher/manifest.json +++ b/homeassistant/components/folder_watcher/manifest.json @@ -2,6 +2,7 @@ "domain": "folder_watcher", "name": "Folder Watcher", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/folder_watcher", "iot_class": "local_polling", 
"loggers": ["watchdog"], diff --git a/homeassistant/components/folder_watcher/strings.json b/homeassistant/components/folder_watcher/strings.json new file mode 100644 index 00000000000..bd1742b8ce3 --- /dev/null +++ b/homeassistant/components/folder_watcher/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "error": { + "not_dir": "Configured path is not a directory", + "not_readable_dir": "Configured path is not readable", + "not_allowed_dir": "Configured path is not in allowlist" + }, + "step": { + "user": { + "data": { + "folder": "Path to the watched folder", + "patterns": "Pattern(s) to monitor" + }, + "data_description": { + "folder": "Path needs to be from root, as example `/config`", + "patterns": "Example: `*.yaml` to only see yaml files" + } + } + } + }, + "options": { + "step": { + "init": { + "data": { + "patterns": "[%key:component::folder_watcher::config::step::user::data::patterns%]" + }, + "data_description": { + "patterns": "[%key:component::folder_watcher::config::step::user::data_description::patterns%]" + } + } + } + }, + "issues": { + "import_failed_not_allowed_path": { + "title": "The Folder Watcher YAML configuration could not be imported", + "description": "Configuring Folder Watcher using YAML is being removed but your configuration could not be imported as the folder {path} is not in the configured allowlist.\n\nPlease add it to `{config_variable}` in config.yaml and restart Home Assistant to import it and fix this issue." + }, + "setup_not_allowed_path": { + "title": "The Folder Watcher configuration for {path} could not start", + "description": "The path {path} is not accessible or not allowed to be accessed.\n\nPlease check the path is accessible and add it to `{config_variable}` in config.yaml and restart Home Assistant to fix this issue." 
+ } + } +} diff --git a/homeassistant/components/fritz/__init__.py b/homeassistant/components/fritz/__init__.py index ba9e2191901..bab97569eda 100644 --- a/homeassistant/components/fritz/__init__.py +++ b/homeassistant/components/fritz/__init__.py @@ -3,13 +3,20 @@ import logging from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .common import AvmWrapper, FritzData from .const import ( DATA_FRITZ, + DEFAULT_SSL, DOMAIN, FRITZ_AUTH_EXCEPTIONS, FRITZ_EXCEPTIONS, @@ -29,6 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: port=entry.data[CONF_PORT], username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], + use_tls=entry.data.get(CONF_SSL, DEFAULT_SSL), ) try: diff --git a/homeassistant/components/fritz/common.py b/homeassistant/components/fritz/common.py index e4d5e92b742..f051c824847 100644 --- a/homeassistant/components/fritz/common.py +++ b/homeassistant/components/fritz/common.py @@ -48,7 +48,7 @@ from .const import ( DEFAULT_CONF_OLD_DISCOVERY, DEFAULT_DEVICE_NAME, DEFAULT_HOST, - DEFAULT_PORT, + DEFAULT_SSL, DEFAULT_USERNAME, DOMAIN, FRITZ_EXCEPTIONS, @@ -184,9 +184,10 @@ class FritzBoxTools( self, hass: HomeAssistant, password: str, + port: int, username: str = DEFAULT_USERNAME, host: str = DEFAULT_HOST, - port: int = DEFAULT_PORT, + use_tls: bool = DEFAULT_SSL, ) -> None: """Initialize FritzboxTools class.""" super().__init__( @@ -211,6 +212,7 @@ class FritzBoxTools( self.password = password self.port = port self.username = username + self.use_tls = use_tls self.has_call_deflections: bool = False self._model: str | None = None self._current_firmware: str | None = None @@ -230,11 +232,13 
@@ class FritzBoxTools( def setup(self) -> None: """Set up FritzboxTools class.""" + self.connection = FritzConnection( address=self.host, port=self.port, user=self.username, password=self.password, + use_tls=self.use_tls, timeout=60.0, pool_maxsize=30, ) diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index a217adf935c..fdafd486b29 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -25,14 +25,22 @@ from homeassistant.config_entries import ( OptionsFlow, OptionsFlowWithConfigEntry, ) -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import callback from .const import ( CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY, DEFAULT_HOST, - DEFAULT_PORT, + DEFAULT_HTTP_PORT, + DEFAULT_HTTPS_PORT, + DEFAULT_SSL, DOMAIN, ERROR_AUTH_INVALID, ERROR_CANNOT_CONNECT, @@ -61,6 +69,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._entry: ConfigEntry | None = None self._name: str = "" self._password: str = "" + self._use_tls: bool = False self._port: int | None = None self._username: str = "" self._model: str = "" @@ -74,6 +83,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): port=self._port, user=self._username, password=self._password, + use_tls=self._use_tls, timeout=60.0, pool_maxsize=30, ) @@ -120,6 +130,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: self._password, CONF_PORT: self._port, CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, }, options={ CONF_CONSIDER_HOME: DEFAULT_CONSIDER_HOME.total_seconds(), @@ -127,13 +138,18 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): }, ) + def _determine_port(self, user_input: dict[str, Any]) -> int: + """Determine port from user_input.""" + if port := 
user_input.get(CONF_PORT): + return int(port) + return DEFAULT_HTTPS_PORT if user_input[CONF_SSL] else DEFAULT_HTTP_PORT + async def async_step_ssdp( self, discovery_info: ssdp.SsdpServiceInfo ) -> ConfigFlowResult: """Handle a flow initialized by discovery.""" ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "") self._host = ssdp_location.hostname - self._port = ssdp_location.port self._name = ( discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME] @@ -178,6 +194,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] + self._use_tls = user_input[CONF_SSL] + self._port = self._determine_port(user_input) error = await self.hass.async_add_executor_job(self.fritz_tools_init) @@ -191,14 +209,22 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self, errors: dict[str, str] | None = None ) -> ConfigFlowResult: """Show the setup form to the user.""" + + advanced_data_schema = {} + if self.show_advanced_options: + advanced_data_schema = { + vol.Optional(CONF_PORT): vol.Coerce(int), + } + return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Optional(CONF_HOST, default=DEFAULT_HOST): str, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): vol.Coerce(int), + **advanced_data_schema, vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } ), errors=errors or {}, @@ -214,6 +240,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool, } ), description_placeholders={"name": self._name}, @@ -227,9 +254,11 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is None: return self._show_setup_form_init() self._host = user_input[CONF_HOST] - self._port = user_input[CONF_PORT] 
self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] + self._use_tls = user_input[CONF_SSL] + + self._port = self._determine_port(user_input) if not (error := await self.hass.async_add_executor_job(self.fritz_tools_init)): self._name = self._model @@ -251,6 +280,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): self._port = entry_data[CONF_PORT] self._username = entry_data[CONF_USERNAME] self._password = entry_data[CONF_PASSWORD] + self._use_tls = entry_data[CONF_SSL] + return await self.async_step_reauth_confirm() def _show_setup_form_reauth_confirm( @@ -295,11 +326,83 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PASSWORD: self._password, CONF_PORT: self._port, CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, }, ) await self.hass.config_entries.async_reload(self._entry.entry_id) return self.async_abort(reason="reauth_successful") + async def async_step_reconfigure(self, _: Mapping[str, Any]) -> ConfigFlowResult: + """Handle reconfigure flow .""" + self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert self._entry + self._host = self._entry.data[CONF_HOST] + self._port = self._entry.data[CONF_PORT] + self._username = self._entry.data[CONF_USERNAME] + self._password = self._entry.data[CONF_PASSWORD] + self._use_tls = self._entry.data.get(CONF_SSL, DEFAULT_SSL) + + return await self.async_step_reconfigure_confirm() + + def _show_setup_form_reconfigure_confirm( + self, user_input: dict[str, Any], errors: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Show the reconfigure form to the user.""" + advanced_data_schema = {} + if self.show_advanced_options: + advanced_data_schema = { + vol.Optional(CONF_PORT, default=user_input[CONF_PORT]): vol.Coerce(int), + } + + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, + **advanced_data_schema, + 
vol.Required(CONF_SSL, default=user_input[CONF_SSL]): bool, + } + ), + description_placeholders={"host": self._host}, + errors=errors or {}, + ) + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfigure flow.""" + if user_input is None: + return self._show_setup_form_reconfigure_confirm( + { + CONF_HOST: self._host, + CONF_PORT: self._port, + CONF_SSL: self._use_tls, + } + ) + + self._host = user_input[CONF_HOST] + self._use_tls = user_input[CONF_SSL] + self._port = self._determine_port(user_input) + + if error := await self.hass.async_add_executor_job(self.fritz_tools_init): + return self._show_setup_form_reconfigure_confirm( + user_input={**user_input, CONF_PORT: self._port}, errors={"base": error} + ) + + assert isinstance(self._entry, ConfigEntry) + self.hass.config_entries.async_update_entry( + self._entry, + data={ + CONF_HOST: self._host, + CONF_PASSWORD: self._password, + CONF_PORT: self._port, + CONF_USERNAME: self._username, + CONF_SSL: self._use_tls, + }, + ) + await self.hass.config_entries.async_reload(self._entry.entry_id) + return self.async_abort(reason="reconfigure_successful") + class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): """Handle an options flow.""" diff --git a/homeassistant/components/fritz/const.py b/homeassistant/components/fritz/const.py index caa7d44c378..3794a83dd7f 100644 --- a/homeassistant/components/fritz/const.py +++ b/homeassistant/components/fritz/const.py @@ -46,8 +46,10 @@ DSL_CONNECTION: Literal["dsl"] = "dsl" DEFAULT_DEVICE_NAME = "Unknown device" DEFAULT_HOST = "192.168.178.1" -DEFAULT_PORT = 49000 +DEFAULT_HTTP_PORT = 49000 +DEFAULT_HTTPS_PORT = 49443 DEFAULT_USERNAME = "" +DEFAULT_SSL = False ERROR_AUTH_INVALID = "invalid_auth" ERROR_CANNOT_CONNECT = "cannot_connect" diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 5eed2f59fc4..a96c3b8ac28 100644 --- 
a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -18,6 +18,19 @@ "password": "[%key:common::config_flow::data::password%]" } }, + "reconfigure_confirm": { + "title": "Updating FRITZ!Box Tools - configuration", + "description": "Update FRITZ!Box Tools configuration for: {host}.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", + "ssl": "[%key:common::config_flow::data::ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router.", + "port": "Leave it empty to use the default port." + } + }, "user": { "title": "[%key:component::fritz::config::step::confirm::title%]", "description": "Set up FRITZ!Box Tools to control your FRITZ!Box.\nMinimum needed: username, password.", @@ -25,10 +38,12 @@ "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]" }, "data_description": { - "host": "The hostname or IP address of your FRITZ!Box router." + "host": "The hostname or IP address of your FRITZ!Box router.", + "port": "Leave it empty to use the default port." 
} } }, @@ -36,7 +51,8 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "ignore_ip6_link_local": "IPv6 link local address is not supported.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/homeassistant/components/fritzbox/__init__.py b/homeassistant/components/fritzbox/__init__.py index 7f4006768c4..904a86d21ae 100644 --- a/homeassistant/components/fritzbox/__init__.py +++ b/homeassistant/components/fritzbox/__init__.py @@ -51,12 +51,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: has_templates = await hass.async_add_executor_job(fritz.has_templates) LOGGER.debug("enable smarthome templates: %s", has_templates) - coordinator = FritzboxDataUpdateCoordinator(hass, entry, has_templates) - - await coordinator.async_config_entry_first_refresh() - - hass.data[DOMAIN][entry.entry_id][CONF_COORDINATOR] = coordinator - def _update_unique_id(entry: RegistryEntry) -> dict[str, str] | None: """Update unique ID of entity entry.""" if ( @@ -79,6 +73,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await async_migrate_entries(hass, entry.entry_id, _update_unique_id) + coordinator = FritzboxDataUpdateCoordinator(hass, entry.entry_id, has_templates) + await coordinator.async_setup() + hass.data[DOMAIN][entry.entry_id][CONF_COORDINATOR] = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) def logout_fritzbox(event: Event) -> None: diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index 
c89415fa7ee..62f189b542f 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -221,3 +221,44 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"name": self._name}, errors=errors, ) + + async def async_step_reconfigure( + self, _: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry is not None + self._entry = entry + self._name = self._entry.data[CONF_HOST] + self._host = self._entry.data[CONF_HOST] + self._username = self._entry.data[CONF_USERNAME] + self._password = self._entry.data[CONF_PASSWORD] + + return await self.async_step_reconfigure_confirm() + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + errors = {} + + if user_input is not None: + self._host = user_input[CONF_HOST] + + result = await self.hass.async_add_executor_job(self._try_connect) + + if result == RESULT_SUCCESS: + await self._update_entry() + return self.async_abort(reason="reconfigure_successful") + errors["base"] = result + + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=self._host): str, + } + ), + description_placeholders={"name": self._name}, + errors=errors, + ) diff --git a/homeassistant/components/fritzbox/coordinator.py b/homeassistant/components/fritzbox/coordinator.py index c58665f2b5d..54af8fbdacd 100644 --- a/homeassistant/components/fritzbox/coordinator.py +++ b/homeassistant/components/fritzbox/coordinator.py @@ -12,6 +12,7 @@ from requests.exceptions import ConnectionError as RequestConnectionError, HTTPE from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from 
homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import CONF_CONNECTIONS, DOMAIN, LOGGER @@ -28,34 +29,62 @@ class FritzboxCoordinatorData: class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorData]): """Fritzbox Smarthome device data update coordinator.""" + config_entry: ConfigEntry configuration_url: str - def __init__( - self, hass: HomeAssistant, entry: ConfigEntry, has_templates: bool - ) -> None: + def __init__(self, hass: HomeAssistant, name: str, has_templates: bool) -> None: """Initialize the Fritzbox Smarthome device coordinator.""" - self.entry = entry - self.fritz: Fritzhome = hass.data[DOMAIN][self.entry.entry_id][CONF_CONNECTIONS] + super().__init__( + hass, + LOGGER, + name=name, + update_interval=timedelta(seconds=30), + ) + + self.fritz: Fritzhome = hass.data[DOMAIN][self.config_entry.entry_id][ + CONF_CONNECTIONS + ] self.configuration_url = self.fritz.get_prefixed_host() self.has_templates = has_templates self.new_devices: set[str] = set() self.new_templates: set[str] = set() - super().__init__( - hass, - LOGGER, - name=entry.entry_id, - update_interval=timedelta(seconds=30), + self.data = FritzboxCoordinatorData({}, {}) + + async def async_setup(self) -> None: + """Set up the coordinator.""" + await self.async_config_entry_first_refresh() + self.cleanup_removed_devices( + list(self.data.devices) + list(self.data.templates) ) - self.data = FritzboxCoordinatorData({}, {}) + def cleanup_removed_devices(self, available_ains: list[str]) -> None: + """Cleanup entity and device registry from removed devices.""" + entity_reg = er.async_get(self.hass) + for entity in er.async_entries_for_config_entry( + entity_reg, self.config_entry.entry_id + ): + if entity.unique_id.split("_")[0] not in available_ains: + LOGGER.debug("Removing obsolete entity 
entry %s", entity.entity_id) + entity_reg.async_remove(entity.entity_id) + + device_reg = dr.async_get(self.hass) + identifiers = {(DOMAIN, ain) for ain in available_ains} + for device in dr.async_entries_for_config_entry( + device_reg, self.config_entry.entry_id + ): + if not set(device.identifiers) & identifiers: + LOGGER.debug("Removing obsolete device entry %s", device.name) + device_reg.async_update_device( + device.id, remove_config_entry_id=self.config_entry.entry_id + ) def _update_fritz_devices(self) -> FritzboxCoordinatorData: """Update all fritzbox device data.""" try: - self.fritz.update_devices() + self.fritz.update_devices(ignore_removed=False) if self.has_templates: - self.fritz.update_templates() + self.fritz.update_templates(ignore_removed=False) except RequestConnectionError as ex: raise UpdateFailed from ex except HTTPError: @@ -64,9 +93,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat self.fritz.login() except LoginError as ex: raise ConfigEntryAuthFailed from ex - self.fritz.update_devices() + self.fritz.update_devices(ignore_removed=False) if self.has_templates: - self.fritz.update_templates() + self.fritz.update_templates(ignore_removed=False) devices = self.fritz.get_devices() device_data = {} @@ -99,4 +128,14 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat async def _async_update_data(self) -> FritzboxCoordinatorData: """Fetch all device data.""" - return await self.hass.async_add_executor_job(self._update_fritz_devices) + new_data = await self.hass.async_add_executor_job(self._update_fritz_devices) + + if ( + self.data.devices.keys() - new_data.devices.keys() + or self.data.templates.keys() - new_data.templates.keys() + ): + self.cleanup_removed_devices( + list(new_data.devices) + list(new_data.templates) + ) + + return new_data diff --git a/homeassistant/components/fritzbox/manifest.json b/homeassistant/components/fritzbox/manifest.json index 5d41f8c12dc..de2e9e0200a 
100644 --- a/homeassistant/components/fritzbox/manifest.json +++ b/homeassistant/components/fritzbox/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["pyfritzhome"], "quality_scale": "gold", - "requirements": ["pyfritzhome==0.6.10"], + "requirements": ["pyfritzhome==0.6.11"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:fritzbox:1" diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index f4d2fe3670e..755cc97d7d8 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -26,6 +26,15 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure_confirm": { + "description": "Update your configuration information for {name}.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of your FRITZ!Box router." 
+ } } }, "abort": { @@ -34,7 +43,8 @@ "ignore_ip6_link_local": "IPv6 link local address is not supported.", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "not_supported": "Connected to AVM FRITZ!Box but it's unable to control Smart Home devices.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index d711314cabb..ad63bdbed84 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240404.2"] + "requirements": ["home-assistant-frontend==20240424.1"] } diff --git a/homeassistant/components/geniushub/water_heater.py b/homeassistant/components/geniushub/water_heater.py index 6c3b5223ef9..f17560ebc62 100644 --- a/homeassistant/components/geniushub/water_heater.py +++ b/homeassistant/components/geniushub/water_heater.py @@ -75,9 +75,9 @@ class GeniusWaterHeater(GeniusHeatingZone, WaterHeaterEntity): return list(HA_OPMODE_TO_GH) @property - def current_operation(self) -> str: + def current_operation(self) -> str | None: """Return the current operation mode.""" - return GH_STATE_TO_HA[self._zone.data["mode"]] # type: ignore[return-value] + return GH_STATE_TO_HA[self._zone.data["mode"]] async def async_set_operation_mode(self, operation_mode: str) -> None: """Set a new operation mode for this boiler.""" diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index 
2e33bc6741e..b509806d07f 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], "quality_scale": "platinum", - "requirements": ["gios==3.2.2"] + "requirements": ["gios==4.0.0"] } diff --git a/homeassistant/components/goodwe/manifest.json b/homeassistant/components/goodwe/manifest.json index 03575f9f4e2..6f1bdd2b449 100644 --- a/homeassistant/components/goodwe/manifest.json +++ b/homeassistant/components/goodwe/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/goodwe", "iot_class": "local_polling", "loggers": ["goodwe"], - "requirements": ["goodwe==0.2.32"] + "requirements": ["goodwe==0.3.2"] } diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index 00561cb5fd6..ac43dc58953 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/calendar.google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==7.0.3"] + "requirements": ["gcal-sync==6.0.4", "oauth2client==4.1.3", "ical==8.0.0"] } diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index b62bd0fe5a2..29a1b20f2bc 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -2,12 +2,12 @@ from __future__ import annotations -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, 
ConfigEntryNotReady from homeassistant.helpers import config_entry_oauth2_flow from . import api @@ -18,8 +18,6 @@ PLATFORMS: list[Platform] = [Platform.TODO] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Google Tasks from a config entry.""" - hass.data.setdefault(DOMAIN, {}) - implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry @@ -29,10 +27,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: auth = api.AsyncConfigEntryAuth(hass, session) try: await auth.async_get_access_token() + except ClientResponseError as err: + if 400 <= err.status < 500: + raise ConfigEntryAuthFailed( + "OAuth session is not valid, reauth required" + ) from err + raise ConfigEntryNotReady from err except ClientError as err: raise ConfigEntryNotReady from err - hass.data[DOMAIN][entry.entry_id] = auth + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = auth await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/config_flow.py b/homeassistant/components/google_tasks/config_flow.py index a8e283b55c8..a9ef5c7ff23 100644 --- a/homeassistant/components/google_tasks/config_flow.py +++ b/homeassistant/components/google_tasks/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Google Tasks.""" +from collections.abc import Mapping import logging from typing import Any @@ -8,7 +9,7 @@ from googleapiclient.discovery import build from googleapiclient.errors import HttpError from googleapiclient.http import HttpRequest -from homeassistant.config_entries import ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow @@ -22,6 +23,8 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None + @property def logger(self) -> logging.Logger: 
"""Return logger.""" @@ -39,11 +42,21 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow.""" + credentials = Credentials(token=data[CONF_TOKEN][CONF_ACCESS_TOKEN]) try: + user_resource = build( + "oauth2", + "v2", + credentials=credentials, + ) + user_resource_cmd: HttpRequest = user_resource.userinfo().get() + user_resource_info = await self.hass.async_add_executor_job( + user_resource_cmd.execute + ) resource = build( "tasks", "v1", - credentials=Credentials(token=data[CONF_TOKEN][CONF_ACCESS_TOKEN]), + credentials=credentials, ) cmd: HttpRequest = resource.tasklists().list() await self.hass.async_add_executor_job(cmd.execute) @@ -56,4 +69,32 @@ class OAuth2FlowHandler( except Exception: # pylint: disable=broad-except self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") - return self.async_create_entry(title=self.flow_impl.name, data=data) + user_id = user_resource_info["id"] + if not self.reauth_entry: + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=user_resource_info["name"], data=data) + + if self.reauth_entry.unique_id == user_id or not self.reauth_entry.unique_id: + return self.async_update_reload_and_abort( + self.reauth_entry, unique_id=user_id, data=data + ) + + return self.async_abort(reason="wrong_account") + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await 
self.async_step_user() diff --git a/homeassistant/components/google_tasks/const.py b/homeassistant/components/google_tasks/const.py index 87253486127..0cb04bf1d4e 100644 --- a/homeassistant/components/google_tasks/const.py +++ b/homeassistant/components/google_tasks/const.py @@ -6,7 +6,10 @@ DOMAIN = "google_tasks" OAUTH2_AUTHORIZE = "https://accounts.google.com/o/oauth2/v2/auth" OAUTH2_TOKEN = "https://oauth2.googleapis.com/token" -OAUTH2_SCOPES = ["https://www.googleapis.com/auth/tasks"] +OAUTH2_SCOPES = [ + "https://www.googleapis.com/auth/tasks", + "https://www.googleapis.com/auth/userinfo.profile", +] class TaskStatus(StrEnum): diff --git a/homeassistant/components/google_tasks/strings.json b/homeassistant/components/google_tasks/strings.json index 2cf15f0d93d..4479b34935e 100644 --- a/homeassistant/components/google_tasks/strings.json +++ b/homeassistant/components/google_tasks/strings.json @@ -18,6 +18,7 @@ "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", "access_not_configured": "Unable to access the Google API:\n\n{message}", "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" diff --git a/homeassistant/components/google_translate/const.py b/homeassistant/components/google_translate/const.py index 76827606816..68d8208f26b 100644 --- a/homeassistant/components/google_translate/const.py +++ b/homeassistant/components/google_translate/const.py @@ -7,8 +7,25 @@ DEFAULT_LANG = "en" DEFAULT_TLD = "com" DOMAIN = "google_translate" +# INSTRUCTIONS TO UPDATE LIST: +# +# Removal: +# Removal is as simple as deleting the line containing the language code no longer +# supported. 
+# +# Addition: +# In order to add to this list, follow the below steps: +# 1. Find out if the language is supported: Go to Google Translate website and try +# translating any word from English into your desired language. +# If the "speech" icon is grayed out or no speech is generated, the language is +# not supported and cannot be added. Otherwise, proceed: +# 2. Grab the language code from https://cloud.google.com/translate/docs/languages +# 3. Add the language code in SUPPORT_LANGUAGES, making sure to not disturb the +# alphabetical nature of the list. + SUPPORT_LANGUAGES = [ "af", + "am", "ar", "bg", "bn", @@ -20,16 +37,18 @@ SUPPORT_LANGUAGES = [ "de", "el", "en", - "eo", "es", "et", + "eu", "fi", + "fil", "fr", + "gl", "gu", + "ha", "hi", "hr", "hu", - "hy", "id", "is", "it", @@ -40,15 +59,16 @@ SUPPORT_LANGUAGES = [ "kn", "ko", "la", - "lv", "lt", - "mk", + "lv", "ml", "mr", + "ms", "my", "ne", "nl", "no", + "pa", "pl", "pt", "ro", diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json index 64feedc44c1..98b802f8233 100644 --- a/homeassistant/components/govee_ble/manifest.json +++ b/homeassistant/components/govee_ble/manifest.json @@ -90,5 +90,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/govee_ble", "iot_class": "local_push", - "requirements": ["govee-ble==0.31.0"] + "requirements": ["govee-ble==0.31.2"] } diff --git a/homeassistant/components/group/notify.py b/homeassistant/components/group/notify.py index bad3d7944d3..425dcf5a914 100644 --- a/homeassistant/components/group/notify.py +++ b/homeassistant/components/group/notify.py @@ -34,12 +34,12 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( def add_defaults( - input_data: dict[str, Any], default_data: dict[str, Any] + input_data: dict[str, Any], default_data: Mapping[str, Any] ) -> dict[str, Any]: """Deep update a dictionary with default values.""" for key, val in default_data.items(): if 
isinstance(val, Mapping): - input_data[key] = add_defaults(input_data.get(key, {}), val) # type: ignore[arg-type] + input_data[key] = add_defaults(input_data.get(key, {}), val) elif key not in input_data: input_data[key] = val return input_data diff --git a/homeassistant/components/group/registry.py b/homeassistant/components/group/registry.py index 1441d39d331..6cdb929d60c 100644 --- a/homeassistant/components/group/registry.py +++ b/homeassistant/components/group/registry.py @@ -47,10 +47,12 @@ def _process_group_platform( class GroupIntegrationRegistry: """Class to hold a registry of integrations.""" - on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} - off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} - on_states_by_domain: dict[str, set] = {} - exclude_domains: set = set() + def __init__(self) -> None: + """Initialize registry.""" + self.on_off_mapping: dict[str, str] = {STATE_ON: STATE_OFF} + self.off_on_mapping: dict[str, str] = {STATE_OFF: STATE_ON} + self.on_states_by_domain: dict[str, set[str]] = {} + self.exclude_domains: set[str] = set() def exclude_domain(self) -> None: """Exclude the current domain.""" diff --git a/homeassistant/components/harmony/entity.py b/homeassistant/components/harmony/entity.py index 99b5744e0ed..8bfa9fbad4d 100644 --- a/homeassistant/components/harmony/entity.py +++ b/homeassistant/components/harmony/entity.py @@ -6,6 +6,7 @@ from collections.abc import Callable from datetime import datetime import logging +from homeassistant.core import callback from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_call_later @@ -38,7 +39,7 @@ class HarmonyEntity(Entity): _LOGGER.debug("%s: connected to the HUB", self._data.name) self.async_write_ha_state() - self._clear_disconnection_delay() + self._async_clear_disconnection_delay() async def async_got_disconnected(self, _: str | None = None) -> None: """Notification that we're disconnected from the HUB.""" @@ -46,15 +47,19 @@ class
HarmonyEntity(Entity): # We're going to wait for 10 seconds before announcing we're # unavailable, this to allow a reconnection to happen. self._unsub_mark_disconnected = async_call_later( - self.hass, TIME_MARK_DISCONNECTED, self._mark_disconnected_if_unavailable + self.hass, + TIME_MARK_DISCONNECTED, + self._async_mark_disconnected_if_unavailable, ) - def _clear_disconnection_delay(self) -> None: + @callback + def _async_clear_disconnection_delay(self) -> None: if self._unsub_mark_disconnected: self._unsub_mark_disconnected() self._unsub_mark_disconnected = None - def _mark_disconnected_if_unavailable(self, _: datetime) -> None: + @callback + def _async_mark_disconnected_if_unavailable(self, _: datetime) -> None: self._unsub_mark_disconnected = None if not self.available: # Still disconnected. Let the state engine know. diff --git a/homeassistant/components/harmony/remote.py b/homeassistant/components/harmony/remote.py index c6b2e9be718..0c9bdcb9c6e 100644 --- a/homeassistant/components/harmony/remote.py +++ b/homeassistant/components/harmony/remote.py @@ -138,7 +138,7 @@ class HarmonyRemote(HarmonyEntity, RemoteEntity, RestoreEntity): _LOGGER.debug("%s: Harmony Hub added", self._data.name) - self.async_on_remove(self._clear_disconnection_delay) + self.async_on_remove(self._async_clear_disconnection_delay) self._setup_callbacks() self.async_on_remove( diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 5a1edcd3c3f..3494798b50b 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.46", "babel==2.13.1"] + "requirements": ["holidays==0.47", "babel==2.13.1"] } diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json 
index d46a2e50bfd..09b2f17c947 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -192,7 +192,7 @@ "service_not_found": { "message": "Service {domain}.{service} not found." }, - "service_does_not_supports_reponse": { + "service_does_not_support_response": { "message": "A service which does not return responses can't be called with {return_response}." }, "service_lacks_response_request": { diff --git a/homeassistant/components/homeassistant_alerts/__init__.py b/homeassistant/components/homeassistant_alerts/__init__.py index 7dcd9f8db97..ef5e330699a 100644 --- a/homeassistant/components/homeassistant_alerts/__init__.py +++ b/homeassistant/components/homeassistant_alerts/__init__.py @@ -20,7 +20,7 @@ from homeassistant.helpers.issue_registry import ( async_create_issue, async_delete_issue, ) -from homeassistant.helpers.start import async_at_start +from homeassistant.helpers.start import async_at_started from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.setup import EventComponentLoaded @@ -30,6 +30,8 @@ DOMAIN = "homeassistant_alerts" UPDATE_INTERVAL = timedelta(hours=3) _LOGGER = logging.getLogger(__name__) +REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=30) + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -52,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: try: response = await async_get_clientsession(hass).get( f"https://alerts.home-assistant.io/alerts/{alert.alert_id}.json", - timeout=aiohttp.ClientTimeout(total=30), + timeout=REQUEST_TIMEOUT, ) except TimeoutError: _LOGGER.warning("Error fetching %s: timeout", alert.filename) @@ -106,7 +108,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await coordinator.async_refresh() hass.bus.async_listen(EVENT_COMPONENT_LOADED, _component_loaded) - async_at_start(hass, initial_refresh) + 
async_at_started(hass, initial_refresh) return True @@ -146,7 +148,7 @@ class AlertUpdateCoordinator(DataUpdateCoordinator[dict[str, IntegrationAlert]]) async def _async_update_data(self) -> dict[str, IntegrationAlert]: response = await async_get_clientsession(self.hass).get( "https://alerts.home-assistant.io/alerts.json", - timeout=aiohttp.ClientTimeout(total=10), + timeout=REQUEST_TIMEOUT, ) alerts = await response.json() diff --git a/homeassistant/components/homeassistant_sky_connect/__init__.py b/homeassistant/components/homeassistant_sky_connect/__init__.py index a85a1161792..fc02f31f263 100644 --- a/homeassistant/components/homeassistant_sky_connect/__init__.py +++ b/homeassistant/components/homeassistant_sky_connect/__init__.py @@ -2,87 +2,62 @@ from __future__ import annotations -from homeassistant.components import usb -from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - check_multi_pan_addon, - get_zigbee_socket, - multi_pan_addon_using_device, -) -from homeassistant.config_entries import SOURCE_HARDWARE, ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError -from homeassistant.helpers import discovery_flow +import logging -from .const import DOMAIN -from .util import get_hardware_variant, get_usb_service_info +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from .util import guess_firmware_type -async def _async_usb_scan_done(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Finish Home Assistant SkyConnect config entry setup.""" - matcher = usb.USBCallbackMatcher( - domain=DOMAIN, - vid=entry.data["vid"].upper(), - pid=entry.data["pid"].upper(), - serial_number=entry.data["serial_number"].lower(), - manufacturer=entry.data["manufacturer"].lower(), - description=entry.data["description"].lower(), - ) - - if not usb.async_is_plugged_in(hass, matcher): - # The USB dongle 
is not plugged in, remove the config entry - hass.async_create_task( - hass.config_entries.async_remove(entry.entry_id), eager_start=True - ) - return - - usb_dev = entry.data["device"] - # The call to get_serial_by_id can be removed in HA Core 2024.1 - dev_path = await hass.async_add_executor_job(usb.get_serial_by_id, usb_dev) - - if not await multi_pan_addon_using_device(hass, dev_path): - usb_info = get_usb_service_info(entry) - await hass.config_entries.flow.async_init( - "zha", - context={"source": "usb"}, - data=usb_info, - ) - return - - hw_variant = get_hardware_variant(entry) - hw_discovery_data = { - "name": f"{hw_variant.short_name} Multiprotocol", - "port": { - "path": get_zigbee_socket(), - }, - "radio_type": "ezsp", - } - discovery_flow.async_create_flow( - hass, - "zha", - context={"source": SOURCE_HARDWARE}, - data=hw_discovery_data, - ) +_LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a Home Assistant SkyConnect config entry.""" - - try: - await check_multi_pan_addon(hass) - except HomeAssistantError as err: - raise ConfigEntryNotReady from err - - @callback - def async_usb_scan_done() -> None: - """Handle usb discovery started.""" - hass.async_create_task(_async_usb_scan_done(hass, entry), eager_start=True) - - unsub_usb = usb.async_register_initial_scan_callback(hass, async_usb_scan_done) - entry.async_on_unload(unsub_usb) - return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return True + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + + _LOGGER.debug( + "Migrating from version %s:%s", config_entry.version, config_entry.minor_version + ) + + if config_entry.version == 1: + if config_entry.minor_version == 1: + # Add-on startup with type service get started before Core, always (e.g. the + # Multi-Protocol add-on). 
Probing the firmware would interfere with the add-on, + # so we can't safely probe here. Instead, we must make an educated guess! + firmware_guess = await guess_firmware_type( + hass, config_entry.data["device"] + ) + + new_data = {**config_entry.data} + new_data["firmware"] = firmware_guess.firmware_type.value + + # Copy `description` to `product` + new_data["product"] = new_data["description"] + + hass.config_entries.async_update_entry( + config_entry, + data=new_data, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True + + # This means the user has downgraded from a future version + return False diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index 3a3d32c2888..6ffb2783165 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -2,29 +2,498 @@ from __future__ import annotations +from abc import ABC, abstractmethod +import asyncio +import logging from typing import Any +from universal_silabs_flasher.const import ApplicationType + from homeassistant.components import usb +from homeassistant.components.hassio import ( + AddonError, + AddonInfo, + AddonManager, + AddonState, + is_hassio, +) from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components.zha.repairs.wrong_silabs_firmware import ( + probe_silabs_firmware_type, +) +from homeassistant.config_entries import ( + ConfigEntry, + ConfigEntryBaseFlow, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, + OptionsFlowWithConfigEntry, +) from homeassistant.core import callback +from homeassistant.data_entry_flow import AbortFlow -from .const import DOMAIN, 
HardwareVariant -from .util import get_hardware_variant, get_usb_service_info +from .const import DOCS_WEB_FLASHER_URL, DOMAIN, ZHA_DOMAIN, HardwareVariant +from .util import ( + get_hardware_variant, + get_otbr_addon_manager, + get_usb_service_info, + get_zha_device_path, + get_zigbee_flasher_addon_manager, +) + +_LOGGER = logging.getLogger(__name__) + +STEP_PICK_FIRMWARE_THREAD = "pick_firmware_thread" +STEP_PICK_FIRMWARE_ZIGBEE = "pick_firmware_zigbee" -class HomeAssistantSkyConnectConfigFlow(ConfigFlow, domain=DOMAIN): +class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC): + """Base flow to install firmware.""" + + _failed_addon_name: str + _failed_addon_reason: str + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate base flow.""" + super().__init__(*args, **kwargs) + + self._usb_info: usb.UsbServiceInfo | None = None + self._hw_variant: HardwareVariant | None = None + self._probed_firmware_type: ApplicationType | None = None + + self.addon_install_task: asyncio.Task | None = None + self.addon_start_task: asyncio.Task | None = None + self.addon_uninstall_task: asyncio.Task | None = None + + def _get_translation_placeholders(self) -> dict[str, str]: + """Shared translation placeholders.""" + placeholders = { + "model": ( + self._hw_variant.full_name + if self._hw_variant is not None + else "unknown" + ), + "firmware_type": ( + self._probed_firmware_type.value + if self._probed_firmware_type is not None + else "unknown" + ), + "docs_web_flasher_url": DOCS_WEB_FLASHER_URL, + } + + self.context["title_placeholders"] = placeholders + + return placeholders + + async def _async_set_addon_config( + self, config: dict, addon_manager: AddonManager + ) -> None: + """Set add-on config.""" + try: + await addon_manager.async_set_addon_options(config) + except AddonError as err: + _LOGGER.error(err) + raise AbortFlow( + "addon_set_config_failed", + description_placeholders=self._get_translation_placeholders(), + ) from err + + async def 
_async_get_addon_info(self, addon_manager: AddonManager) -> AddonInfo: + """Return add-on info.""" + try: + addon_info = await addon_manager.async_get_addon_info() + except AddonError as err: + _LOGGER.error(err) + raise AbortFlow( + "addon_info_failed", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": addon_manager.addon_name, + }, + ) from err + + return addon_info + + async def async_step_pick_firmware( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Thread or Zigbee firmware.""" + assert self._usb_info is not None + + self._probed_firmware_type = await probe_silabs_firmware_type( + self._usb_info.device, + probe_methods=( + # We probe in order of frequency: Zigbee, Thread, then multi-PAN + ApplicationType.GECKO_BOOTLOADER, + ApplicationType.EZSP, + ApplicationType.SPINEL, + ApplicationType.CPC, + ), + ) + + if self._probed_firmware_type not in ( + ApplicationType.EZSP, + ApplicationType.SPINEL, + ApplicationType.CPC, + ): + return self.async_abort( + reason="unsupported_firmware", + description_placeholders=self._get_translation_placeholders(), + ) + + return self.async_show_menu( + step_id="pick_firmware", + menu_options=[ + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, + ], + description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Zigbee firmware.""" + # Allow the stick to be used with ZHA without flashing + if self._probed_firmware_type == ApplicationType.EZSP: + return await self.async_step_confirm_zigbee() + + if not is_hassio(self.hass): + return self.async_abort( + reason="not_hassio", + description_placeholders=self._get_translation_placeholders(), + ) + + # Only flash new firmware if we need to + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(fw_flasher_manager) + + 
if addon_info.state == AddonState.NOT_INSTALLED: + return await self.async_step_install_zigbee_flasher_addon() + + if addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_run_zigbee_flasher_addon() + + # If the addon is already installed and running, fail + return self.async_abort( + reason="addon_already_running", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + ) + + async def async_step_install_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show progress dialog for installing the Zigbee flasher addon.""" + return await self._install_addon( + get_zigbee_flasher_addon_manager(self.hass), + "install_zigbee_flasher_addon", + "run_zigbee_flasher_addon", + ) + + async def _install_addon( + self, + addon_manager: silabs_multiprotocol_addon.WaitingAddonManager, + step_id: str, + next_step_id: str, + ) -> ConfigFlowResult: + """Show progress dialog for installing an addon.""" + addon_info = await self._async_get_addon_info(addon_manager) + + _LOGGER.debug("Flasher addon state: %s", addon_info) + + if not self.addon_install_task: + self.addon_install_task = self.hass.async_create_task( + addon_manager.async_install_addon_waiting(), + "Addon install", + ) + + if not self.addon_install_task.done(): + return self.async_show_progress( + step_id=step_id, + progress_action="install_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": addon_manager.addon_name, + }, + progress_task=self.addon_install_task, + ) + + try: + await self.addon_install_task + except AddonError as err: + _LOGGER.error(err) + self._failed_addon_name = addon_manager.addon_name + self._failed_addon_reason = "addon_install_failed" + return self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_install_task = None + + return self.async_show_progress_done(next_step_id=next_step_id) 
+ + async def async_step_addon_operation_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Abort when add-on installation or start failed.""" + return self.async_abort( + reason=self._failed_addon_reason, + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": self._failed_addon_name, + }, + ) + + async def async_step_run_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure the flasher addon to point to the SkyConnect and run it.""" + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(fw_flasher_manager) + + assert self._usb_info is not None + new_addon_config = { + **addon_info.options, + "device": self._usb_info.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + + _LOGGER.debug("Reconfiguring flasher addon with %s", new_addon_config) + await self._async_set_addon_config(new_addon_config, fw_flasher_manager) + + if not self.addon_start_task: + + async def start_and_wait_until_done() -> None: + await fw_flasher_manager.async_start_addon_waiting() + # Now that the addon is running, wait for it to finish + await fw_flasher_manager.async_wait_until_addon_state( + AddonState.NOT_RUNNING + ) + + self.addon_start_task = self.hass.async_create_task( + start_and_wait_until_done() + ) + + if not self.addon_start_task.done(): + return self.async_show_progress( + step_id="run_zigbee_flasher_addon", + progress_action="run_zigbee_flasher_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + progress_task=self.addon_start_task, + ) + + try: + await self.addon_start_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + self._failed_addon_name = fw_flasher_manager.addon_name + self._failed_addon_reason = "addon_start_failed" + return 
self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_start_task = None + + return self.async_show_progress_done( + next_step_id="uninstall_zigbee_flasher_addon" + ) + + async def async_step_uninstall_zigbee_flasher_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Uninstall the flasher addon.""" + fw_flasher_manager = get_zigbee_flasher_addon_manager(self.hass) + + if not self.addon_uninstall_task: + _LOGGER.debug("Uninstalling flasher addon") + self.addon_uninstall_task = self.hass.async_create_task( + fw_flasher_manager.async_uninstall_addon_waiting() + ) + + if not self.addon_uninstall_task.done(): + return self.async_show_progress( + step_id="uninstall_zigbee_flasher_addon", + progress_action="uninstall_zigbee_flasher_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": fw_flasher_manager.addon_name, + }, + progress_task=self.addon_uninstall_task, + ) + + try: + await self.addon_uninstall_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + # The uninstall failing isn't critical so we can just continue + finally: + self.addon_uninstall_task = None + + return self.async_show_progress_done(next_step_id="confirm_zigbee") + + async def async_step_confirm_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm Zigbee setup.""" + assert self._usb_info is not None + assert self._hw_variant is not None + self._probed_firmware_type = ApplicationType.EZSP + + if user_input is not None: + await self.hass.config_entries.flow.async_init( + ZHA_DOMAIN, + context={"source": "hardware"}, + data={ + "name": self._hw_variant.full_name, + "port": { + "path": self._usb_info.device, + "baudrate": 115200, + "flow_control": "hardware", + }, + "radio_type": "ezsp", + }, + ) + + return self._async_flow_finished() + + return self.async_show_form( + step_id="confirm_zigbee", + 
description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_thread( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Thread firmware.""" + # We install the OTBR addon no matter what, since it is required to use Thread + if not is_hassio(self.hass): + return self.async_abort( + reason="not_hassio_thread", + description_placeholders=self._get_translation_placeholders(), + ) + + otbr_manager = get_otbr_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(otbr_manager) + + if addon_info.state == AddonState.NOT_INSTALLED: + return await self.async_step_install_otbr_addon() + + if addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_start_otbr_addon() + + # If the addon is already installed and running, fail + return self.async_abort( + reason="otbr_addon_already_running", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": otbr_manager.addon_name, + }, + ) + + async def async_step_install_otbr_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show progress dialog for installing the OTBR addon.""" + return await self._install_addon( + get_otbr_addon_manager(self.hass), "install_otbr_addon", "start_otbr_addon" + ) + + async def async_step_start_otbr_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Configure OTBR to point to the SkyConnect and run the addon.""" + otbr_manager = get_otbr_addon_manager(self.hass) + addon_info = await self._async_get_addon_info(otbr_manager) + + assert self._usb_info is not None + new_addon_config = { + **addon_info.options, + "device": self._usb_info.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + + _LOGGER.debug("Reconfiguring OTBR addon with %s", new_addon_config) + await self._async_set_addon_config(new_addon_config, otbr_manager) + + if not self.addon_start_task: + 
self.addon_start_task = self.hass.async_create_task( + otbr_manager.async_start_addon_waiting() + ) + + if not self.addon_start_task.done(): + return self.async_show_progress( + step_id="start_otbr_addon", + progress_action="start_otbr_addon", + description_placeholders={ + **self._get_translation_placeholders(), + "addon_name": otbr_manager.addon_name, + }, + progress_task=self.addon_start_task, + ) + + try: + await self.addon_start_task + except (AddonError, AbortFlow) as err: + _LOGGER.error(err) + self._failed_addon_name = otbr_manager.addon_name + self._failed_addon_reason = "addon_start_failed" + return self.async_show_progress_done(next_step_id="addon_operation_failed") + finally: + self.addon_start_task = None + + return self.async_show_progress_done(next_step_id="confirm_otbr") + + async def async_step_confirm_otbr( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm OTBR setup.""" + assert self._usb_info is not None + assert self._hw_variant is not None + + self._probed_firmware_type = ApplicationType.SPINEL + + if user_input is not None: + # OTBR discovery is done automatically via hassio + return self._async_flow_finished() + + return self.async_show_form( + step_id="confirm_otbr", + description_placeholders=self._get_translation_placeholders(), + ) + + @abstractmethod + def _async_flow_finished(self) -> ConfigFlowResult: + """Finish the flow.""" + # This should be implemented by a subclass + raise NotImplementedError + + +class HomeAssistantSkyConnectConfigFlow( + BaseFirmwareInstallFlow, ConfigFlow, domain=DOMAIN +): """Handle a config flow for Home Assistant SkyConnect.""" VERSION = 1 + MINOR_VERSION = 2 @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> HomeAssistantSkyConnectOptionsFlow: + ) -> OptionsFlow: """Return the options flow.""" - return HomeAssistantSkyConnectOptionsFlow(config_entry) + firmware_type = ApplicationType(config_entry.data["firmware"]) + + if firmware_type 
is ApplicationType.CPC: + return HomeAssistantSkyConnectMultiPanOptionsFlowHandler(config_entry) + + return HomeAssistantSkyConnectOptionsFlowHandler(config_entry) async def async_step_usb( self, discovery_info: usb.UsbServiceInfo @@ -37,27 +506,62 @@ class HomeAssistantSkyConnectConfigFlow(ConfigFlow, domain=DOMAIN): manufacturer = discovery_info.manufacturer description = discovery_info.description unique_id = f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}" + if await self.async_set_unique_id(unique_id): self._abort_if_unique_id_configured(updates={"device": device}) + discovery_info.device = await self.hass.async_add_executor_job( + usb.get_serial_by_id, discovery_info.device + ) + + self._usb_info = discovery_info + assert description is not None - hw_variant = HardwareVariant.from_usb_product_name(description) + self._hw_variant = HardwareVariant.from_usb_product_name(description) + + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm a discovery.""" + self._set_confirm_only() + + # Without confirmation, discovery can automatically progress into parts of the + # config flow logic that interacts with hardware. 
+ if user_input is not None: + return await self.async_step_pick_firmware() + + return self.async_show_form( + step_id="confirm", + description_placeholders=self._get_translation_placeholders(), + ) + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._usb_info is not None + assert self._hw_variant is not None + assert self._probed_firmware_type is not None return self.async_create_entry( - title=hw_variant.full_name, + title=self._hw_variant.full_name, data={ - "device": device, - "vid": vid, - "pid": pid, - "serial_number": serial_number, - "manufacturer": manufacturer, - "description": description, + "vid": self._usb_info.vid, + "pid": self._usb_info.pid, + "serial_number": self._usb_info.serial_number, + "manufacturer": self._usb_info.manufacturer, + "description": self._usb_info.description, # For backwards compatibility + "product": self._usb_info.description, + "device": self._usb_info.device, + "firmware": self._probed_firmware_type.value, }, ) -class HomeAssistantSkyConnectOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandler): - """Handle an option flow for Home Assistant SkyConnect.""" +class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( + silabs_multiprotocol_addon.OptionsFlowHandler +): + """Multi-PAN options flow for Home Assistant SkyConnect.""" async def _async_serial_port_settings( self, @@ -92,3 +596,97 @@ class HomeAssistantSkyConnectOptionsFlow(silabs_multiprotocol_addon.OptionsFlowH def _hardware_name(self) -> str: """Return the name of the hardware.""" return self._hw_variant.full_name + + +class HomeAssistantSkyConnectOptionsFlowHandler( + BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry +): + """Zigbee and Thread options flow handlers.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._usb_info = get_usb_service_info(self.config_entry) + self._probed_firmware_type = 
ApplicationType(self.config_entry.data["firmware"]) + self._hw_variant = HardwareVariant.from_usb_product_name( + self.config_entry.data["product"] + ) + + # Make `context` a regular dictionary + self.context = {} + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options flow.""" + # Don't probe the running firmware, we load it from the config entry + return self.async_show_menu( + step_id="pick_firmware", + menu_options=[ + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, + ], + description_placeholders=self._get_translation_placeholders(), + ) + + async def async_step_pick_firmware_zigbee( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Zigbee firmware.""" + assert self._usb_info is not None + + if is_hassio(self.hass): + otbr_manager = get_otbr_addon_manager(self.hass) + otbr_addon_info = await self._async_get_addon_info(otbr_manager) + + if ( + otbr_addon_info.state != AddonState.NOT_INSTALLED + and otbr_addon_info.options.get("device") == self._usb_info.device + ): + raise AbortFlow( + "otbr_still_using_stick", + description_placeholders=self._get_translation_placeholders(), + ) + + return await super().async_step_pick_firmware_zigbee(user_input) + + async def async_step_pick_firmware_thread( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Pick Thread firmware.""" + assert self._usb_info is not None + + zha_entries = self.hass.config_entries.async_entries( + ZHA_DOMAIN, + include_ignore=False, + include_disabled=True, + ) + + if zha_entries and get_zha_device_path(zha_entries[0]) == self._usb_info.device: + raise AbortFlow( + "zha_still_using_stick", + description_placeholders=self._get_translation_placeholders(), + ) + + return await super().async_step_pick_firmware_thread(user_input) + + def _async_flow_finished(self) -> 
ConfigFlowResult: + """Create the config entry.""" + assert self._usb_info is not None + assert self._hw_variant is not None + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + "firmware": self._probed_firmware_type.value, + }, + options=self.config_entry.options, + ) + + return self.async_create_entry(title="", data={}) diff --git a/homeassistant/components/homeassistant_sky_connect/const.py b/homeassistant/components/homeassistant_sky_connect/const.py index 1dd1471c470..1d6c16dc528 100644 --- a/homeassistant/components/homeassistant_sky_connect/const.py +++ b/homeassistant/components/homeassistant_sky_connect/const.py @@ -5,6 +5,17 @@ import enum from typing import Self DOMAIN = "homeassistant_sky_connect" +ZHA_DOMAIN = "zha" + +DOCS_WEB_FLASHER_URL = "https://skyconnect.home-assistant.io/firmware-update/" + +OTBR_ADDON_NAME = "OpenThread Border Router" +OTBR_ADDON_MANAGER_DATA = "openthread_border_router" +OTBR_ADDON_SLUG = "core_openthread_border_router" + +ZIGBEE_FLASHER_ADDON_NAME = "Silicon Labs Flasher" +ZIGBEE_FLASHER_ADDON_MANAGER_DATA = "silabs_flasher" +ZIGBEE_FLASHER_ADDON_SLUG = "core_silabs_flasher" @dataclasses.dataclass(frozen=True) diff --git a/homeassistant/components/homeassistant_sky_connect/hardware.py b/homeassistant/components/homeassistant_sky_connect/hardware.py index a9abeb27737..2872077111a 100644 --- a/homeassistant/components/homeassistant_sky_connect/hardware.py +++ b/homeassistant/components/homeassistant_sky_connect/hardware.py @@ -25,7 +25,7 @@ def async_info(hass: HomeAssistant) -> list[HardwareInfo]: pid=entry.data["pid"], serial_number=entry.data["serial_number"], manufacturer=entry.data["manufacturer"], - description=entry.data["description"], + description=entry.data["product"], ), name=get_hardware_variant(entry).full_name, url=DOCUMENTATION_URL, diff --git 
a/homeassistant/components/homeassistant_sky_connect/manifest.json b/homeassistant/components/homeassistant_sky_connect/manifest.json index f56fd24de61..c90ea2c075f 100644 --- a/homeassistant/components/homeassistant_sky_connect/manifest.json +++ b/homeassistant/components/homeassistant_sky_connect/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "dependencies": ["hardware", "usb", "homeassistant_hardware"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_sky_connect", - "integration_type": "hardware", + "integration_type": "device", "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/homeassistant_sky_connect/strings.json b/homeassistant/components/homeassistant_sky_connect/strings.json index 825649ef0d3..792406dcb02 100644 --- a/homeassistant/components/homeassistant_sky_connect/strings.json +++ b/homeassistant/components/homeassistant_sky_connect/strings.json @@ -57,6 +57,50 @@ "start_flasher_addon": { "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::title%]", "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::description%]" + }, + "confirm": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm::description%]" + }, + "pick_firmware": { + "title": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::description%]", + "menu_options": { + "pick_firmware_thread": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::menu_options::pick_firmware_thread%]", + "pick_firmware_zigbee": "[%key:component::homeassistant_sky_connect::config::step::pick_firmware::menu_options::pick_firmware_zigbee%]" + } + }, + 
"install_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::install_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::install_zigbee_flasher_addon::description%]" + }, + "run_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::run_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::run_zigbee_flasher_addon::description%]" + }, + "zigbee_flasher_failed": { + "title": "[%key:component::homeassistant_sky_connect::config::step::zigbee_flasher_failed::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::zigbee_flasher_failed::description%]" + }, + "confirm_zigbee": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm_zigbee::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm_zigbee::description%]" + }, + "install_otbr_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::install_otbr_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::install_otbr_addon::description%]" + }, + "start_otbr_addon": { + "title": "[%key:component::homeassistant_sky_connect::config::step::start_otbr_addon::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::start_otbr_addon::description%]" + }, + "otbr_failed": { + "title": "[%key:component::homeassistant_sky_connect::config::step::otbr_failed::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::otbr_failed::description%]" + }, + "confirm_otbr": { + "title": "[%key:component::homeassistant_sky_connect::config::step::confirm_otbr::title%]", + "description": "[%key:component::homeassistant_sky_connect::config::step::confirm_otbr::description%]" } }, "error": { @@ -68,12 +112,92 @@ "addon_already_running": 
"[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_already_running%]", "addon_set_config_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_set_config_failed%]", "addon_start_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_start_failed%]", + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", - "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]" + "not_hassio_thread": "[%key:component::homeassistant_sky_connect::config::abort::not_hassio_thread%]", + "otbr_addon_already_running": "[%key:component::homeassistant_sky_connect::config::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.", + "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again." 
}, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", - "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]" + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::install_zigbee_flasher_addon%]", + "run_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::run_zigbee_flasher_addon%]", + "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_sky_connect::config::progress::uninstall_zigbee_flasher_addon%]" + } + }, + "config": { + "flow_title": "{model}", + "step": { + "confirm": { + "title": "Set up the {model}", + "description": "The {model} can be used as either a Thread border router or a Zigbee coordinator. In the next step, you will choose which firmware will be configured." + }, + "pick_firmware": { + "title": "Pick your firmware", + "description": "The {model} can be used as a Thread border router or a Zigbee coordinator.", + "menu_options": { + "pick_firmware_thread": "Use as a Thread border router", + "pick_firmware_zigbee": "Use as a Zigbee coordinator" + } + }, + "install_zigbee_flasher_addon": { + "title": "Installing flasher", + "description": "Installing the Silicon Labs Flasher add-on." + }, + "run_zigbee_flasher_addon": { + "title": "Installing Zigbee firmware", + "description": "Installing Zigbee firmware. This will take about a minute." + }, + "uninstall_zigbee_flasher_addon": { + "title": "Removing flasher", + "description": "Removing the Silicon Labs Flasher add-on." 
+ }, + "zigbee_flasher_failed": { + "title": "Zigbee installation failed", + "description": "The Zigbee firmware installation process was unsuccessful. Ensure no other software is trying to communicate with the {model} and try again." + }, + "confirm_zigbee": { + "title": "Zigbee setup complete", + "description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration once you exit." + }, + "install_otbr_addon": { + "title": "Installing OpenThread Border Router add-on", + "description": "The OpenThread Border Router (OTBR) add-on is being installed." + }, + "start_otbr_addon": { + "title": "Starting OpenThread Border Router add-on", + "description": "The OpenThread Border Router (OTBR) add-on is now starting." + }, + "otbr_failed": { + "title": "Failed to set up OpenThread Border Router", + "description": "The OpenThread Border Router add-on installation was unsuccessful. Ensure no other software is trying to communicate with the {model}, you have access to the internet and can install other add-ons, and try again. Check the Supervisor logs if the problem persists." + }, + "confirm_otbr": { + "title": "OpenThread Border Router setup complete", + "description": "Your {model} is now an OpenThread Border Router and will show up in the Thread integration once you exit."
+ } + }, + "abort": { + "addon_info_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_info_failed%]", + "addon_install_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_install_failed%]", + "addon_already_running": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_already_running%]", + "addon_set_config_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_set_config_failed%]", + "addon_start_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::addon_start_failed%]", + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", + "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", + "not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.", + "otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again."
+ }, + "progress": { + "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "The Silicon Labs Flasher addon is installed, this may take a few minutes.", + "run_zigbee_flasher_addon": "Please wait while Zigbee firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes.", + "uninstall_zigbee_flasher_addon": "The Silicon Labs Flasher addon is being removed." } } } diff --git a/homeassistant/components/homeassistant_sky_connect/util.py b/homeassistant/components/homeassistant_sky_connect/util.py index e1de1d3b442..f242416fa9a 100644 --- a/homeassistant/components/homeassistant_sky_connect/util.py +++ b/homeassistant/components/homeassistant_sky_connect/util.py @@ -2,10 +2,35 @@ from __future__ import annotations -from homeassistant.components import usb -from homeassistant.config_entries import ConfigEntry +from collections import defaultdict +from dataclasses import dataclass +import logging +from typing import cast -from .const import HardwareVariant +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components import usb +from homeassistant.components.hassio import AddonError, AddonState, is_hassio +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + WaitingAddonManager, + get_multiprotocol_addon_manager, +) +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.singleton import singleton + +from .const import ( + OTBR_ADDON_MANAGER_DATA, + OTBR_ADDON_NAME, + 
OTBR_ADDON_SLUG, + ZHA_DOMAIN, + ZIGBEE_FLASHER_ADDON_MANAGER_DATA, + ZIGBEE_FLASHER_ADDON_NAME, + ZIGBEE_FLASHER_ADDON_SLUG, + HardwareVariant, +) + +_LOGGER = logging.getLogger(__name__) def get_usb_service_info(config_entry: ConfigEntry) -> usb.UsbServiceInfo: @@ -16,10 +41,115 @@ def get_usb_service_info(config_entry: ConfigEntry) -> usb.UsbServiceInfo: pid=config_entry.data["pid"], serial_number=config_entry.data["serial_number"], manufacturer=config_entry.data["manufacturer"], - description=config_entry.data["description"], + description=config_entry.data["product"], ) def get_hardware_variant(config_entry: ConfigEntry) -> HardwareVariant: """Get the hardware variant from the config entry.""" - return HardwareVariant.from_usb_product_name(config_entry.data["description"]) + return HardwareVariant.from_usb_product_name(config_entry.data["product"]) + + +def get_zha_device_path(config_entry: ConfigEntry) -> str: + """Get the device path from a ZHA config entry.""" + return cast(str, config_entry.data["device"]["path"]) + + +@singleton(OTBR_ADDON_MANAGER_DATA) +@callback +def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager: + """Get the OTBR add-on manager.""" + return WaitingAddonManager( + hass, + _LOGGER, + OTBR_ADDON_NAME, + OTBR_ADDON_SLUG, + ) + + +@singleton(ZIGBEE_FLASHER_ADDON_MANAGER_DATA) +@callback +def get_zigbee_flasher_addon_manager(hass: HomeAssistant) -> WaitingAddonManager: + """Get the flasher add-on manager.""" + return WaitingAddonManager( + hass, + _LOGGER, + ZIGBEE_FLASHER_ADDON_NAME, + ZIGBEE_FLASHER_ADDON_SLUG, + ) + + +@dataclass(slots=True, kw_only=True) +class FirmwareGuess: + """Firmware guess.""" + + is_running: bool + firmware_type: ApplicationType + source: str + + +async def guess_firmware_type(hass: HomeAssistant, device_path: str) -> FirmwareGuess: + """Guess the firmware type based on installed addons and other integrations.""" + device_guesses: defaultdict[str | None, list[FirmwareGuess]] = 
defaultdict(list) + + for zha_config_entry in hass.config_entries.async_entries(ZHA_DOMAIN): + zha_path = get_zha_device_path(zha_config_entry) + device_guesses[zha_path].append( + FirmwareGuess( + is_running=(zha_config_entry.state == ConfigEntryState.LOADED), + firmware_type=ApplicationType.EZSP, + source="zha", + ) + ) + + if is_hassio(hass): + otbr_addon_manager = get_otbr_addon_manager(hass) + + try: + otbr_addon_info = await otbr_addon_manager.async_get_addon_info() + except AddonError: + pass + else: + if otbr_addon_info.state != AddonState.NOT_INSTALLED: + otbr_path = otbr_addon_info.options.get("device") + device_guesses[otbr_path].append( + FirmwareGuess( + is_running=(otbr_addon_info.state == AddonState.RUNNING), + firmware_type=ApplicationType.SPINEL, + source="otbr", + ) + ) + + multipan_addon_manager = await get_multiprotocol_addon_manager(hass) + + try: + multipan_addon_info = await multipan_addon_manager.async_get_addon_info() + except AddonError: + pass + else: + if multipan_addon_info.state != AddonState.NOT_INSTALLED: + multipan_path = multipan_addon_info.options.get("device") + device_guesses[multipan_path].append( + FirmwareGuess( + is_running=(multipan_addon_info.state == AddonState.RUNNING), + firmware_type=ApplicationType.CPC, + source="multiprotocol", + ) + ) + + # Fall back to EZSP if we can't guess the firmware type + if device_path not in device_guesses: + return FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + # Prioritizes guesses that were pulled from a running addon or integration but keep + # the sort order we defined above + guesses = sorted( + device_guesses[device_path], + key=lambda guess: guess.is_running, + ) + + assert guesses + + return guesses[-1] diff --git a/homeassistant/components/homekit/accessories.py b/homeassistant/components/homekit/accessories.py index f2e1a26b3de..40e86efe6a9 100644 --- a/homeassistant/components/homekit/accessories.py +++ 
b/homeassistant/components/homekit/accessories.py @@ -46,6 +46,7 @@ from homeassistant.core import ( Context, Event, EventStateChangedData, + HassJobType, HomeAssistant, State, callback as ha_callback, @@ -436,7 +437,10 @@ class HomeAccessory(Accessory): # type: ignore[misc] self._update_available_from_state(state) self._subscriptions.append( async_track_state_change_event( - self.hass, [self.entity_id], self.async_update_event_state_callback + self.hass, + [self.entity_id], + self.async_update_event_state_callback, + job_type=HassJobType.Callback, ) ) @@ -456,6 +460,7 @@ class HomeAccessory(Accessory): # type: ignore[misc] self.hass, [self.linked_battery_sensor], self.async_update_linked_battery_callback, + job_type=HassJobType.Callback, ) ) elif state is not None: @@ -468,6 +473,7 @@ class HomeAccessory(Accessory): # type: ignore[misc] self.hass, [self.linked_battery_charging_sensor], self.async_update_linked_battery_charging_callback, + job_type=HassJobType.Callback, ) ) elif battery_charging_state is None and state is not None: diff --git a/homeassistant/components/homekit/type_cameras.py b/homeassistant/components/homekit/type_cameras.py index d14fef8eabf..4f05bfbd687 100644 --- a/homeassistant/components/homekit/type_cameras.py +++ b/homeassistant/components/homekit/type_cameras.py @@ -20,6 +20,7 @@ from homeassistant.const import STATE_ON from homeassistant.core import ( Event, EventStateChangedData, + HassJobType, HomeAssistant, State, callback, @@ -272,6 +273,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self.hass, [self.linked_motion_sensor], self._async_update_motion_state_event, + job_type=HassJobType.Callback, ) ) @@ -282,6 +284,7 @@ class Camera(HomeAccessory, PyhapCamera): # type: ignore[misc] self.hass, [self.linked_doorbell_sensor], self._async_update_doorbell_state_event, + job_type=HassJobType.Callback, ) ) diff --git a/homeassistant/components/homekit/type_covers.py b/homeassistant/components/homekit/type_covers.py index 
d14713b5f05..29dda418665 100644 --- a/homeassistant/components/homekit/type_covers.py +++ b/homeassistant/components/homekit/type_covers.py @@ -34,7 +34,13 @@ from homeassistant.const import ( STATE_OPEN, STATE_OPENING, ) -from homeassistant.core import Event, EventStateChangedData, State, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback, +) from homeassistant.helpers.event import async_track_state_change_event from .accessories import TYPES, HomeAccessory @@ -136,6 +142,7 @@ class GarageDoorOpener(HomeAccessory): self.hass, [self.linked_obstruction_sensor], self._async_update_obstruction_event, + job_type=HassJobType.Callback, ) ) diff --git a/homeassistant/components/homekit/type_humidifiers.py b/homeassistant/components/homekit/type_humidifiers.py index 1fca441e800..5bdf5950f18 100644 --- a/homeassistant/components/homekit/type_humidifiers.py +++ b/homeassistant/components/homekit/type_humidifiers.py @@ -25,7 +25,13 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_ON, ) -from homeassistant.core import Event, EventStateChangedData, State, callback +from homeassistant.core import ( + Event, + EventStateChangedData, + HassJobType, + State, + callback, +) from homeassistant.helpers.event import async_track_state_change_event from .accessories import TYPES, HomeAccessory @@ -184,6 +190,7 @@ class HumidifierDehumidifier(HomeAccessory): self.hass, [self.linked_humidity_sensor], self.async_update_current_humidity_event, + job_type=HassJobType.Callback, ) ) diff --git a/homeassistant/components/homematicip_cloud/climate.py b/homeassistant/components/homematicip_cloud/climate.py index b0eb2a9edfa..dd89efed1c9 100644 --- a/homeassistant/components/homematicip_cloud/climate.py +++ b/homeassistant/components/homematicip_cloud/climate.py @@ -13,6 +13,7 @@ from homematicip.aio.group import AsyncHeatingGroup from homematicip.base.enums import AbsenceType from homematicip.device import Switch from 
homematicip.functionalHomes import IndoorClimateHome +from homematicip.group import HeatingCoolingProfile from homeassistant.components.climate import ( PRESET_AWAY, @@ -35,6 +36,14 @@ from .hap import HomematicipHAP HEATING_PROFILES = {"PROFILE_1": 0, "PROFILE_2": 1, "PROFILE_3": 2} COOLING_PROFILES = {"PROFILE_4": 3, "PROFILE_5": 4, "PROFILE_6": 5} +NICE_PROFILE_NAMES = { + "PROFILE_1": "Default", + "PROFILE_2": "Alternative 1", + "PROFILE_3": "Alternative 2", + "PROFILE_4": "Cooling 1", + "PROFILE_5": "Cooling 2", + "PROFILE_6": "Cooling 3", +} ATTR_PRESET_END_TIME = "preset_end_time" PERMANENT_END_TIME = "permanent" @@ -164,8 +173,9 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): return PRESET_ECO return ( - self._device.activeProfile.name - if self._device.activeProfile.name in self._device_profile_names + self._get_qualified_profile_name(self._device.activeProfile) + if self._get_qualified_profile_name(self._device.activeProfile) + in self._device_profile_names else None ) @@ -218,9 +228,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if preset_mode not in self.preset_modes: - return - if self._device.boostMode and preset_mode != PRESET_BOOST: await self._device.set_boost(False) if preset_mode == PRESET_BOOST: @@ -256,20 +263,30 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): return self._home.get_functionalHome(IndoorClimateHome) @property - def _device_profiles(self) -> list[Any]: + def _device_profiles(self) -> list[HeatingCoolingProfile]: """Return the relevant profiles.""" return [ profile for profile in self._device.profiles - if profile.visible - and profile.name != "" - and profile.index in self._relevant_profile_group + if profile.visible and profile.index in self._relevant_profile_group ] @property def _device_profile_names(self) -> list[str]: """Return a collection of 
profile names.""" - return [profile.name for profile in self._device_profiles] + return [ + self._get_qualified_profile_name(profile) + for profile in self._device_profiles + ] + + def _get_qualified_profile_name(self, profile: HeatingCoolingProfile) -> str: + """Get a name for the given profile. If exists, this is the name of the profile.""" + if profile.name != "": + return profile.name + if profile.index in NICE_PROFILE_NAMES: + return NICE_PROFILE_NAMES[profile.index] + + return profile.index def _get_profile_idx_by_name(self, profile_name: str) -> int: """Return a profile index by name.""" @@ -277,7 +294,7 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity): index_name = [ profile.index for profile in self._device_profiles - if profile.name == profile_name + if self._get_qualified_profile_name(profile) == profile_name ] return relevant_index[index_name[0]] diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 580a0f637c1..9da4e1bee05 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -1,7 +1,7 @@ { "domain": "homematicip_cloud", "name": "HomematicIP Cloud", - "codeowners": [], + "codeowners": ["@hahn-th"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", "iot_class": "cloud_push", diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index b9515c306d6..f447860c53f 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -690,7 +690,10 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): CONF_PORT: user_input[CONF_PORT], } return self.async_update_reload_and_abort( - entry, options=new_options, reason="reconfigure_successful" + entry, + options=new_options, + 
reason="reconfigure_successful", + reload_even_if_entry_is_unchanged=False, ) return self.async_show_form( diff --git a/homeassistant/components/html5/notify.py b/homeassistant/components/html5/notify.py index 782340dffa6..6049f8e2434 100644 --- a/homeassistant/components/html5/notify.py +++ b/homeassistant/components/html5/notify.py @@ -165,7 +165,7 @@ HTML5_SHOWNOTIFICATION_PARAMETERS = ( ) -def get_service( +async def async_get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, @@ -173,7 +173,7 @@ def get_service( """Get the HTML5 push notification service.""" json_path = hass.config.path(REGISTRATIONS_FILE) - registrations = _load_config(json_path) + registrations = await hass.async_add_executor_job(_load_config, json_path) vapid_pub_key = config[ATTR_VAPID_PUB_KEY] vapid_prv_key = config[ATTR_VAPID_PRV_KEY] diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index f9532b90ce6..83601599d88 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -69,6 +69,7 @@ from homeassistant.util.json import json_loads from .auth import async_setup_auth, async_sign_path from .ban import setup_bans from .const import ( # noqa: F401 + DOMAIN, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER, StrictConnectionMode, @@ -82,8 +83,6 @@ from .security_filter import setup_security_filter from .static import CACHE_HEADERS, CachingStaticResource from .web_runner import HomeAssistantTCPSite -DOMAIN: Final = "http" - CONF_SERVER_HOST: Final = "server_host" CONF_SERVER_PORT: Final = "server_port" CONF_BASE_URL: Final = "base_url" @@ -149,7 +148,7 @@ HTTP_SCHEMA: Final = vol.All( vol.Optional(CONF_USE_X_FRAME_OPTIONS, default=True): cv.boolean, vol.Optional( CONF_STRICT_CONNECTION, default=StrictConnectionMode.DISABLED - ): vol.In([e.value for e in StrictConnectionMode]), + ): vol.Coerce(StrictConnectionMode), } ), ) @@ -628,7 +627,9 @@ def 
_setup_services(hass: HomeAssistant, conf: ConfData) -> None: ) try: - url = get_url(hass, prefer_external=True, allow_internal=False) + url = get_url( + hass, prefer_external=True, allow_internal=False, allow_cloud=False + ) except NoURLAvailableError as ex: raise ServiceValidationError( translation_domain=DOMAIN, diff --git a/homeassistant/components/http/auth.py b/homeassistant/components/http/auth.py index 1eb74289089..58dae21d2a6 100644 --- a/homeassistant/components/http/auth.py +++ b/homeassistant/components/http/auth.py @@ -25,6 +25,7 @@ from homeassistant.auth.const import GROUP_ID_READ_ONLY from homeassistant.auth.models import User from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import singleton from homeassistant.helpers.http import current_request from homeassistant.helpers.json import json_bytes from homeassistant.helpers.network import is_cloud_connection @@ -32,6 +33,7 @@ from homeassistant.helpers.storage import Store from homeassistant.util.network import is_local from .const import ( + DOMAIN, KEY_AUTHENTICATED, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER, @@ -50,8 +52,9 @@ STORAGE_VERSION = 1 STORAGE_KEY = "http.auth" CONTENT_USER_NAME = "Home Assistant Content" STRICT_CONNECTION_EXCLUDED_PATH = "/api/webhook/" -STRICT_CONNECTION_STATIC_PAGE = os.path.join( - os.path.dirname(__file__), "strict_connection_static_page.html" +STRICT_CONNECTION_GUARD_PAGE_NAME = "strict_connection_guard_page.html" +STRICT_CONNECTION_GUARD_PAGE = os.path.join( + os.path.dirname(__file__), STRICT_CONNECTION_GUARD_PAGE_NAME ) @@ -156,16 +159,10 @@ async def async_setup_auth( await store.async_save(data) hass.data[STORAGE_KEY] = refresh_token.id - strict_connection_static_file_content = None - if strict_connection_mode_non_cloud is StrictConnectionMode.STATIC_PAGE: - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: - return file.read() - 
- strict_connection_static_file_content = await hass.async_add_executor_job( - read_static_page - ) + if strict_connection_mode_non_cloud is StrictConnectionMode.GUARD_PAGE: + # Load the guard page content on setup + await _read_strict_connection_guard_page(hass) @callback def async_validate_auth_header(request: Request) -> bool: @@ -255,21 +252,36 @@ async def async_setup_auth( authenticated = True auth_type = "signed request" - if ( - not authenticated - and strict_connection_mode_non_cloud is not StrictConnectionMode.DISABLED - and not request.path.startswith(STRICT_CONNECTION_EXCLUDED_PATH) - and not await hass.auth.session.async_validate_request_for_strict_connection_session( - request - ) - and ( - resp := _async_perform_action_on_non_local( - request, strict_connection_static_file_content - ) - ) - is not None + if not authenticated and not request.path.startswith( + STRICT_CONNECTION_EXCLUDED_PATH ): - return resp + strict_connection_mode = strict_connection_mode_non_cloud + strict_connection_func = ( + _async_perform_strict_connection_action_on_non_local + ) + if is_cloud_connection(hass): + from homeassistant.components.cloud.util import ( # pylint: disable=import-outside-toplevel + get_strict_connection_mode, + ) + + strict_connection_mode = get_strict_connection_mode(hass) + strict_connection_func = _async_perform_strict_connection_action + + if ( + strict_connection_mode is not StrictConnectionMode.DISABLED + and not await hass.auth.session.async_validate_request_for_strict_connection_session( + request + ) + and ( + resp := await strict_connection_func( + hass, + request, + strict_connection_mode is StrictConnectionMode.GUARD_PAGE, + ) + ) + is not None + ): + return resp if authenticated and _LOGGER.isEnabledFor(logging.DEBUG): _LOGGER.debug( @@ -286,17 +298,17 @@ async def async_setup_auth( app.middlewares.append(auth_middleware) -@callback -def _async_perform_action_on_non_local( +async def _async_perform_strict_connection_action_on_non_local( + 
hass: HomeAssistant, request: Request, - strict_connection_static_file_content: str | None, + guard_page: bool, ) -> StreamResponse | None: """Perform strict connection mode action if the request is not local. The function does the following: - Try to get the IP address of the request. If it fails, assume it's not local - If the request is local, return None (allow the request to continue) - - If strict_connection_static_file_content is set, return a response with the content + - If guard_page is True, return a response with the content - Otherwise close the connection and raise an exception """ try: @@ -308,10 +320,25 @@ def _async_perform_action_on_non_local( if ip_address_ and is_local(ip_address_): return None - _LOGGER.debug("Perform strict connection action for %s", ip_address_) - if strict_connection_static_file_content: + return await _async_perform_strict_connection_action(hass, request, guard_page) + + +async def _async_perform_strict_connection_action( + hass: HomeAssistant, + request: Request, + guard_page: bool, +) -> StreamResponse | None: + """Perform strict connection mode action. 
+ + The function does the following: + - If guard_page is True, return a response with the content + - Otherwise close the connection and raise an exception + """ + + _LOGGER.debug("Perform strict connection action for %s", request.remote) + if guard_page: return Response( - text=strict_connection_static_file_content, + text=await _read_strict_connection_guard_page(hass), content_type="text/html", status=HTTPStatus.IM_A_TEAPOT, ) @@ -322,3 +349,14 @@ def _async_perform_action_on_non_local( # We need to raise an exception to stop processing the request raise HTTPBadRequest + + +@singleton.singleton(f"{DOMAIN}_{STRICT_CONNECTION_GUARD_PAGE_NAME}") +async def _read_strict_connection_guard_page(hass: HomeAssistant) -> str: + """Read the strict connection guard page from disk via executor.""" + + def read_guard_page() -> str: + with open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: + return file.read() + + return await hass.async_add_executor_job(read_guard_page) diff --git a/homeassistant/components/http/const.py b/homeassistant/components/http/const.py index d02416c531b..4a15e310b11 100644 --- a/homeassistant/components/http/const.py +++ b/homeassistant/components/http/const.py @@ -5,6 +5,8 @@ from typing import Final from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS # noqa: F401 +DOMAIN: Final = "http" + KEY_HASS_USER: Final = "hass_user" KEY_HASS_REFRESH_TOKEN_ID: Final = "hass_refresh_token_id" @@ -13,5 +15,5 @@ class StrictConnectionMode(StrEnum): """Enum for strict connection mode.""" DISABLED = "disabled" - STATIC_PAGE = "static_page" + GUARD_PAGE = "guard_page" DROP_CONNECTION = "drop_connection" diff --git a/homeassistant/components/http/strict_connection_static_page.html b/homeassistant/components/http/strict_connection_guard_page.html similarity index 99% rename from homeassistant/components/http/strict_connection_static_page.html rename to homeassistant/components/http/strict_connection_guard_page.html index 
86ea8e00e90..8567e500c9d 100644 --- a/homeassistant/components/http/strict_connection_static_page.html +++ b/homeassistant/components/http/strict_connection_guard_page.html @@ -123,7 +123,7 @@

You need access

- This device is not known on + This device is not known to Home Assistant.

diff --git a/homeassistant/components/husqvarna_automower/__init__.py b/homeassistant/components/husqvarna_automower/__init__.py index 03ab02429bb..fe6f6978014 100644 --- a/homeassistant/components/husqvarna_automower/__init__.py +++ b/homeassistant/components/husqvarna_automower/__init__.py @@ -21,6 +21,7 @@ PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.LAWN_MOWER, + Platform.NUMBER, Platform.SELECT, Platform.SENSOR, Platform.SWITCH, diff --git a/homeassistant/components/husqvarna_automower/api.py b/homeassistant/components/husqvarna_automower/api.py index e5dc00ad7cb..f1d3e1ef4fa 100644 --- a/homeassistant/components/husqvarna_automower/api.py +++ b/homeassistant/components/husqvarna_automower/api.py @@ -1,6 +1,7 @@ """API for Husqvarna Automower bound to Home Assistant OAuth.""" import logging +from typing import cast from aioautomower.auth import AbstractAuth from aioautomower.const import API_BASE_URL @@ -26,4 +27,4 @@ class AsyncConfigEntryAuth(AbstractAuth): async def async_get_access_token(self) -> str: """Return a valid access token.""" await self._oauth_session.async_ensure_token_valid() - return self._oauth_session.token["access_token"] + return cast(str, self._oauth_session.token["access_token"]) diff --git a/homeassistant/components/husqvarna_automower/device_tracker.py b/homeassistant/components/husqvarna_automower/device_tracker.py index a32fd8758bd..780d1da76fb 100644 --- a/homeassistant/components/husqvarna_automower/device_tracker.py +++ b/homeassistant/components/husqvarna_automower/device_tracker.py @@ -1,5 +1,7 @@ """Creates the device tracker entity for the mower.""" +from typing import TYPE_CHECKING + from homeassistant.components.device_tracker import SourceType, TrackerEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -44,9 +46,13 @@ class AutomowerDeviceTrackerEntity(AutomowerBaseEntity, TrackerEntity): @property def latitude(self) -> float: 
"""Return latitude value of the device.""" + if TYPE_CHECKING: + assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].latitude @property def longitude(self) -> float: """Return longitude value of the device.""" + if TYPE_CHECKING: + assert self.mower_attributes.positions is not None return self.mower_attributes.positions[0].longitude diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index ec11ef92d08..2ecbf9c198a 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ b/homeassistant/components/husqvarna_automower/icons.json @@ -8,6 +8,11 @@ "default": "mdi:debug-step-into" } }, + "number": { + "cutting_height": { + "default": "mdi:grass" + } + }, "select": { "headlight_mode": { "default": "mdi:car-light-high" diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index e4536ee594d..647320a8bf3 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.3.4"] + "requirements": ["aioautomower==2024.4.4"] } diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py new file mode 100644 index 00000000000..e2e617b427b --- /dev/null +++ b/homeassistant/components/husqvarna_automower/number.py @@ -0,0 +1,104 @@ +"""Creates the number entities for the mower.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +import logging +from typing import TYPE_CHECKING, Any + +from aioautomower.exceptions import ApiException +from aioautomower.model import MowerAttributes +from aioautomower.session import 
AutomowerSession + +from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import AutomowerDataUpdateCoordinator +from .entity import AutomowerBaseEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class AutomowerNumberEntityDescription(NumberEntityDescription): + """Describes Automower number entity.""" + + exists_fn: Callable[[MowerAttributes], bool] = lambda _: True + value_fn: Callable[[MowerAttributes], int] + set_value_fn: Callable[[AutomowerSession, str, float], Awaitable[Any]] + + +@callback +def _async_get_cutting_height(data: MowerAttributes) -> int: + """Return the cutting height.""" + if TYPE_CHECKING: + # Sensor does not get created if it is None + assert data.cutting_height is not None + return data.cutting_height + + +NUMBER_TYPES: tuple[AutomowerNumberEntityDescription, ...] 
= ( + AutomowerNumberEntityDescription( + key="cutting_height", + translation_key="cutting_height", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_min_value=1, + native_max_value=9, + exists_fn=lambda data: data.cutting_height is not None, + value_fn=_async_get_cutting_height, + set_value_fn=lambda session, mower_id, cheight: session.set_cutting_height( + mower_id, int(cheight) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up number platform.""" + coordinator: AutomowerDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + AutomowerNumberEntity(mower_id, coordinator, description) + for mower_id in coordinator.data + for description in NUMBER_TYPES + if description.exists_fn(coordinator.data[mower_id]) + ) + + +class AutomowerNumberEntity(AutomowerBaseEntity, NumberEntity): + """Defining the AutomowerNumberEntity with AutomowerNumberEntityDescription.""" + + entity_description: AutomowerNumberEntityDescription + + def __init__( + self, + mower_id: str, + coordinator: AutomowerDataUpdateCoordinator, + description: AutomowerNumberEntityDescription, + ) -> None: + """Set up AutomowerNumberEntity.""" + super().__init__(mower_id, coordinator) + self.entity_description = description + self._attr_unique_id = f"{mower_id}_{description.key}" + + @property + def native_value(self) -> float: + """Return the state of the number.""" + return self.entity_description.value_fn(self.mower_attributes) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.api, self.mower_id, value + ) + except ApiException as exception: + raise HomeAssistantError( + f"Command couldn't be sent to the command queue: {exception}" + ) from exception diff --git 
a/homeassistant/components/husqvarna_automower/select.py b/homeassistant/components/husqvarna_automower/select.py index e4376a1bca5..67aac4a2046 100644 --- a/homeassistant/components/husqvarna_automower/select.py +++ b/homeassistant/components/husqvarna_automower/select.py @@ -1,6 +1,7 @@ """Creates a select entity for the headlight of the mower.""" import logging +from typing import cast from aioautomower.exceptions import ApiException from aioautomower.model import HeadlightModes @@ -58,12 +59,14 @@ class AutomowerSelectEntity(AutomowerControlEntity, SelectEntity): @property def current_option(self) -> str: """Return the current option for the entity.""" - return self.mower_attributes.headlight.mode.lower() + return cast(HeadlightModes, self.mower_attributes.headlight.mode).lower() async def async_select_option(self, option: str) -> None: """Change the selected option.""" try: - await self.coordinator.api.set_headlight_mode(self.mower_id, option.upper()) + await self.coordinator.api.set_headlight_mode( + self.mower_id, cast(HeadlightModes, option.upper()) + ) except ApiException as exception: raise HomeAssistantError( f"Command couldn't be sent to the command queue: {exception}" diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 10aec9b1536..6840708ed42 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -18,7 +18,6 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfLength, UnitOf from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.util import dt as dt_util from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator @@ -298,7 +297,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( key="next_start_timestamp", translation_key="next_start_timestamp", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: dt_util.as_local(data.planner.next_start_datetime), + value_fn=lambda data: data.planner.next_start_datetime, ), AutomowerSensorEntityDescription( key="error", diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 0a2d3685c6e..b4c1c97cd68 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -37,6 +37,11 @@ "name": "Returning to dock" } }, + "number": { + "cutting_height": { + "name": "Cutting height" + } + }, "select": { "headlight_mode": { "name": "Headlight mode", diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 541d4211e49..b4e14c42709 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -1,56 +1,29 @@ """Support for Hydrawise cloud.""" -from pydrawise import legacy -import voluptuous as vol +from pydrawise import auth, client -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_API_KEY, - CONF_SCAN_INTERVAL, - Platform, -) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType +from homeassistant.exceptions import ConfigEntryAuthFailed from .const import DOMAIN, SCAN_INTERVAL from .coordinator import HydrawiseDataUpdateCoordinator -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_ACCESS_TOKEN): cv.string, - vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period, - } - ) - }, - 
extra=vol.ALLOW_EXTRA, -) - PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Hunter Hydrawise component.""" - if DOMAIN not in config: - return True - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_API_KEY: config[DOMAIN][CONF_ACCESS_TOKEN]}, - ) - ) - return True - - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Hydrawise from a config entry.""" - access_token = config_entry.data[CONF_API_KEY] - hydrawise = legacy.LegacyHydrawiseAsync(access_token) + if CONF_USERNAME not in config_entry.data or CONF_PASSWORD not in config_entry.data: + # The GraphQL API requires username and password to authenticate. If either is + # missing, reauth is required. + raise ConfigEntryAuthFailed + + hydrawise = client.Hydrawise( + auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]) + ) + coordinator = HydrawiseDataUpdateCoordinator(hass, hydrawise, SCAN_INTERVAL) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index e75cf56ac75..a93976b12e0 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -3,20 +3,15 @@ from __future__ import annotations from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.binary_sensor import ( - PLATFORM_SCHEMA, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation 
as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN from .coordinator import HydrawiseDataUpdateCoordinator @@ -39,27 +34,6 @@ BINARY_SENSOR_KEYS: list[str] = [ desc.key for desc in (BINARY_SENSOR_STATUS, *BINARY_SENSOR_TYPES) ] -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=BINARY_SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(BINARY_SENSOR_KEYS)] - ) - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index cfaaefcd03a..1c2c1c5cf29 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -2,18 +2,16 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Mapping from typing import Any from aiohttp import ClientError -from pydrawise import legacy +from pydrawise import auth, client +from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN -from homeassistant.data_entry_flow import AbortFlow, FlowResultType -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.config_entries import ConfigEntry, 
ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN, LOGGER @@ -23,14 +21,26 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _create_entry( - self, api_key: str, *, on_failure: Callable[[str], ConfigFlowResult] + def __init__(self) -> None: + """Construct a ConfigFlow.""" + self.reauth_entry: ConfigEntry | None = None + + async def _create_or_update_entry( + self, + username: str, + password: str, + *, + on_failure: Callable[[str], ConfigFlowResult], ) -> ConfigFlowResult: """Create the config entry.""" - api = legacy.LegacyHydrawiseAsync(api_key) + + # Verify that the provided credentials work.""" + api = client.Hydrawise(auth.Auth(username, password)) try: # Skip fetching zones to save on metered API calls. - user = await api.get_user(fetch_zones=False) + user = await api.get_user() + except NotAuthorizedError: + return on_failure("invalid_auth") except TimeoutError: return on_failure("timeout_connect") except ClientError as ex: @@ -38,51 +48,33 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): return on_failure("cannot_connect") await self.async_set_unique_id(f"hydrawise-{user.customer_id}") - self._abort_if_unique_id_configured() - return self.async_create_entry(title="Hydrawise", data={CONF_API_KEY: api_key}) + if not self.reauth_entry: + self._abort_if_unique_id_configured() + return self.async_create_entry( + title="Hydrawise", + data={CONF_USERNAME: username, CONF_PASSWORD: password}, + ) - def _import_issue(self, error_type: str) -> ConfigFlowResult: - """Create an issue about a YAML import failure.""" - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error_type}", - breaks_in_ha_version="2024.4.0", - is_fixable=False, - severity=IssueSeverity.ERROR, - translation_key="deprecated_yaml_import_issue", - translation_placeholders={ - "error_type": error_type, - "url": "/config/integrations/dashboard/add?domain=hydrawise", - }, - 
) - return self.async_abort(reason=error_type) - - def _deprecated_yaml_issue(self) -> None: - """Create an issue about YAML deprecation.""" - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.4.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Hydrawise", - }, + self.hass.config_entries.async_update_entry( + self.reauth_entry, + data=self.reauth_entry.data + | {CONF_USERNAME: username, CONF_PASSWORD: password}, ) + await self.hass.config_entries.async_reload(self.reauth_entry.entry_id) + return self.async_abort(reason="reauth_successful") async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial setup.""" if user_input is not None: - api_key = user_input[CONF_API_KEY] - return await self._create_entry(api_key, on_failure=self._show_form) + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] + + return await self._create_or_update_entry( + username=username, password=password, on_failure=self._show_form + ) return self._show_form() def _show_form(self, error_type: str | None = None) -> ConfigFlowResult: @@ -91,21 +83,17 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = error_type return self.async_show_form( step_id="user", - data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), + data_schema=vol.Schema( + {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} + ), errors=errors, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import data from YAML.""" - try: - result = await self._create_entry( - import_data.get(CONF_API_KEY, ""), - on_failure=self._import_issue, - ) - except AbortFlow: - self._deprecated_yaml_issue() - raise - - if result["type"] == FlowResultType.CREATE_ENTRY: - 
self._deprecated_yaml_issue() - return result + async def async_step_reauth( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth after updating config to username/password.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index eedeb4a07bc..84e9f979878 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -5,20 +5,16 @@ from __future__ import annotations from datetime import datetime from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.sensor import ( - PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS, UnitOfTime +from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util from .const import DOMAIN @@ -39,32 +35,10 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( ) SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] - -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(SENSOR_KEYS)] - ) - } -) - TWO_YEAR_SECONDS = 60 * 60 * 24 * 365 * 2 WATERING_TIME_ICON = "mdi:water-pump" -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/homeassistant/components/hydrawise/strings.json b/homeassistant/components/hydrawise/strings.json index 8f079abcc7d..ee5cc0a541c 100644 --- a/homeassistant/components/hydrawise/strings.json +++ b/homeassistant/components/hydrawise/strings.json @@ -2,8 +2,11 @@ "config": { "step": { "user": { + "title": "Hydrawise Login", + "description": "Please provide the username and password for your Hydrawise cloud account:", "data": { - "api_key": "[%key:common::config_flow::data::api_key%]" + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" } } }, @@ -13,13 +16,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" - } - }, - "issues": { - "deprecated_yaml_import_issue": { - "title": "The Hydrawise YAML configuration import failed", - "description": "Configuring Hydrawise using YAML is being removed but there was an {error_type} error importing your YAML configuration.\n\nEnsure connection to Hydrawise works and restart Home Assistant to try again or remove the Hydrawise YAML 
configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/homeassistant/components/hydrawise/switch.py b/homeassistant/components/hydrawise/switch.py index 49106a5938a..2dc459e7dd4 100644 --- a/homeassistant/components/hydrawise/switch.py +++ b/homeassistant/components/hydrawise/switch.py @@ -6,28 +6,18 @@ from datetime import timedelta from typing import Any from pydrawise.schema import Zone -import voluptuous as vol from homeassistant.components.switch import ( - PLATFORM_SCHEMA, SwitchDeviceClass, SwitchEntity, SwitchEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MONITORED_CONDITIONS from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import dt as dt_util -from .const import ( - ALLOWED_WATERING_TIME, - CONF_WATERING_TIME, - DEFAULT_WATERING_TIME, - DOMAIN, -) +from .const import DEFAULT_WATERING_TIME, DOMAIN from .coordinator import HydrawiseDataUpdateCoordinator from .entity import HydrawiseEntity @@ -46,30 +36,6 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( SWITCH_KEYS: list[str] = [desc.key for desc in SWITCH_TYPES] -# Deprecated since Home Assistant 2023.10.0 -# Can be removed completely in 2024.4.0 -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SWITCH_KEYS): vol.All( - cv.ensure_list, [vol.In(SWITCH_KEYS)] - ), - vol.Optional( - CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME.total_seconds() // 60 - ): vol.All(vol.In(ALLOWED_WATERING_TIME)), - } -) - - -def setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Hydrawise device.""" - # We don't need to trigger import flow from here as it's triggered from `__init__.py` - return # pragma: no cover - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/hyperion/sensor.py b/homeassistant/components/hyperion/sensor.py index f537c282686..ad972806ae5 100644 --- a/homeassistant/components/hyperion/sensor.py +++ b/homeassistant/components/hyperion/sensor.py @@ -191,13 +191,13 @@ class HyperionVisiblePrioritySensor(HyperionSensor): if priority[KEY_COMPONENTID] == "COLOR": state_value = priority[KEY_VALUE][KEY_RGB] else: - state_value = priority[KEY_OWNER] + state_value = priority.get(KEY_OWNER) attrs = { "component_id": priority[KEY_COMPONENTID], "origin": priority[KEY_ORIGIN], "priority": priority[KEY_PRIORITY], - "owner": priority[KEY_OWNER], + "owner": priority.get(KEY_OWNER), } if priority[KEY_COMPONENTID] == "COLOR": diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index 53d24044b53..c0123b89ee4 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -125,13 +125,13 @@ class ImapMessage: return str(part.get_payload()) @property - def headers(self) -> dict[str, tuple[str,]]: + def headers(self) -> dict[str, tuple[str, ...]]: """Get the email 
headers.""" - header_base: dict[str, tuple[str,]] = {} + header_base: dict[str, tuple[str, ...]] = {} for key, value in self.email_message.items(): - header_instances: tuple[str,] = (str(value),) + header_instances: tuple[str, ...] = (str(value),) if header_base.setdefault(key, header_instances) != header_instances: - header_base[key] += header_instances # type: ignore[assignment] + header_base[key] += header_instances return header_base @property diff --git a/homeassistant/components/input_text/__init__.py b/homeassistant/components/input_text/__init__.py index 52788066ba2..55b43ee8a1e 100644 --- a/homeassistant/components/input_text/__init__.py +++ b/homeassistant/components/input_text/__init__.py @@ -264,7 +264,7 @@ class InputText(collection.CollectionEntity, RestoreEntity): return state = await self.async_get_last_state() - value: str | None = state and state.state # type: ignore[assignment] + value = state.state if state else None # Check against None because value can be 0 if value is not None and self._minimum <= len(value) <= self._maximum: diff --git a/homeassistant/components/iotawatt/coordinator.py b/homeassistant/components/iotawatt/coordinator.py index e741c7a5a27..4f9ac1f94b7 100644 --- a/homeassistant/components/iotawatt/coordinator.py +++ b/homeassistant/components/iotawatt/coordinator.py @@ -63,6 +63,7 @@ class IotawattUpdater(DataUpdateCoordinator): self.entry.data.get(CONF_USERNAME), self.entry.data.get(CONF_PASSWORD), integratedInterval="d", + includeNonTotalSensors=False, ) try: is_authenticated = await api.connect() diff --git a/homeassistant/components/iotawatt/manifest.json b/homeassistant/components/iotawatt/manifest.json index 5beaa1e318c..5fd178389d9 100644 --- a/homeassistant/components/iotawatt/manifest.json +++ b/homeassistant/components/iotawatt/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/iotawatt", "iot_class": "local_polling", "loggers": ["iotawattpy"], - "requirements": 
["ha-iotawattpy==0.1.1"] + "requirements": ["ha-iotawattpy==0.1.2"] } diff --git a/homeassistant/components/isy994/light.py b/homeassistant/components/isy994/light.py index 69701534840..b9b269d9ca3 100644 --- a/homeassistant/components/isy994/light.py +++ b/homeassistant/components/isy994/light.py @@ -114,8 +114,5 @@ class ISYLightEntity(ISYNodeEntity, LightEntity, RestoreEntity): if not (last_state := await self.async_get_last_state()): return - if ( - ATTR_LAST_BRIGHTNESS in last_state.attributes - and last_state.attributes[ATTR_LAST_BRIGHTNESS] - ): - self._last_brightness = last_state.attributes[ATTR_LAST_BRIGHTNESS] + if last_brightness := last_state.attributes.get(ATTR_LAST_BRIGHTNESS): + self._last_brightness = last_brightness diff --git a/homeassistant/components/jellyfin/__init__.py b/homeassistant/components/jellyfin/__init__.py index c24f06d7b19..de9fa805f02 100644 --- a/homeassistant/components/jellyfin/__init__.py +++ b/homeassistant/components/jellyfin/__init__.py @@ -73,6 +73,6 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( ( (DOMAIN, coordinator.server_id), - *((DOMAIN, id) for id in coordinator.device_ids), + *((DOMAIN, device_id) for device_id in coordinator.device_ids), ) ) diff --git a/homeassistant/components/jvc_projector/__init__.py b/homeassistant/components/jvc_projector/__init__.py index 28e4cc995bb..8ce1fb46e3d 100644 --- a/homeassistant/components/jvc_projector/__init__.py +++ b/homeassistant/components/jvc_projector/__init__.py @@ -18,7 +18,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .const import DOMAIN from .coordinator import JvcProjectorDataUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SELECT, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git 
a/homeassistant/components/jvc_projector/icons.json b/homeassistant/components/jvc_projector/icons.json index c70ded78cb4..a0404b328e1 100644 --- a/homeassistant/components/jvc_projector/icons.json +++ b/homeassistant/components/jvc_projector/icons.json @@ -8,6 +8,11 @@ } } }, + "select": { + "input": { + "default": "mdi:hdmi-port" + } + }, "sensor": { "jvc_power_status": { "default": "mdi:power-plug-off", diff --git a/homeassistant/components/jvc_projector/select.py b/homeassistant/components/jvc_projector/select.py new file mode 100644 index 00000000000..1395637fad1 --- /dev/null +++ b/homeassistant/components/jvc_projector/select.py @@ -0,0 +1,77 @@ +"""Select platform for the jvc_projector integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Final + +from jvcprojector import JvcProjector, const + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import JvcProjectorDataUpdateCoordinator +from .const import DOMAIN +from .entity import JvcProjectorEntity + + +@dataclass(frozen=True, kw_only=True) +class JvcProjectorSelectDescription(SelectEntityDescription): + """Describes JVC Projector select entities.""" + + command: Callable[[JvcProjector, str], Awaitable[None]] + + +OPTIONS: Final[dict[str, dict[str, str]]] = { + "input": {const.HDMI1: const.REMOTE_HDMI_1, const.HDMI2: const.REMOTE_HDMI_2} +} + +SELECTS: Final[list[JvcProjectorSelectDescription]] = [ + JvcProjectorSelectDescription( + key="input", + translation_key="input", + options=list(OPTIONS["input"]), + command=lambda device, option: device.remote(OPTIONS["input"][option]), + ) +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the JVC Projector platform from a config entry.""" + coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + async_add_entities( + JvcProjectorSelectEntity(coordinator, description) for description in SELECTS + ) + + +class JvcProjectorSelectEntity(JvcProjectorEntity, SelectEntity): + """Representation of a JVC Projector select entity.""" + + entity_description: JvcProjectorSelectDescription + + def __init__( + self, + coordinator: JvcProjectorDataUpdateCoordinator, + description: JvcProjectorSelectDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + return self.coordinator.data[self.entity_description.key] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.command(self.coordinator.device, option) diff --git 
a/homeassistant/components/jvc_projector/strings.json b/homeassistant/components/jvc_projector/strings.json index 9991fa1cf67..b89139cbab3 100644 --- a/homeassistant/components/jvc_projector/strings.json +++ b/homeassistant/components/jvc_projector/strings.json @@ -38,6 +38,15 @@ "name": "[%key:component::sensor::entity_component::power::name%]" } }, + "select": { + "input": { + "name": "Input", + "state": { + "hdmi1": "HDMI 1", + "hdmi2": "HDMI 2" + } + } + }, "sensor": { "jvc_power_status": { "name": "Power status", diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index c84d53d6039..da68dc36a6d 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -197,11 +197,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: [ platform for platform in SUPPORTED_PLATFORMS - if platform in config and platform not in (Platform.SENSOR, Platform.NOTIFY) + if platform in config and platform is not Platform.SENSOR ], ) - # set up notify platform, no entry support for notify component yet + # set up notify service for backwards compatibility - remove 2024.11 if NotifySchema.PLATFORM in config: hass.async_create_task( discovery.async_load_platform( @@ -232,7 +232,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: platform for platform in SUPPORTED_PLATFORMS if platform in hass.data[DATA_KNX_CONFIG] - and platform not in (Platform.SENSOR, Platform.NOTIFY) + and platform is not Platform.SENSOR ], ], ) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index af0c6b8d01c..77f3db3f9f3 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -4,7 +4,7 @@ "after_dependencies": ["panel_custom"], "codeowners": ["@Julius2342", "@farmio", "@marvin-w"], "config_flow": true, - "dependencies": ["file_upload", "websocket_api"], + 
"dependencies": ["file_upload", "repairs", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/knx", "integration_type": "hub", "iot_class": "local_push", diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 74ae86dc5d0..e208e4fd646 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -1,4 +1,4 @@ -"""Support for KNX/IP notification services.""" +"""Support for KNX/IP notifications.""" from __future__ import annotations @@ -7,13 +7,16 @@ from typing import Any from xknx import XKNX from xknx.devices import Notification as XknxNotification -from homeassistant.components.notify import BaseNotificationService -from homeassistant.const import CONF_NAME, CONF_TYPE +from homeassistant import config_entries +from homeassistant.components.notify import BaseNotificationService, NotifyEntity +from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, CONF_TYPE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .schema import NotifySchema +from .knx_entity import KnxEntity +from .repairs import migrate_notify_issue async def async_get_service( @@ -25,16 +28,11 @@ async def async_get_service( if discovery_info is None: return None - if platform_config := hass.data[DATA_KNX_CONFIG].get(NotifySchema.PLATFORM): + if platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.NOTIFY): xknx: XKNX = hass.data[DOMAIN].xknx notification_devices = [ - XknxNotification( - xknx, - name=device_config[CONF_NAME], - group_address=device_config[KNX_ADDRESS], - value_type=device_config[CONF_TYPE], - ) + _create_notification_instance(xknx, device_config) for device_config in platform_config ] return KNXNotificationService(notification_devices) @@ -59,6 +57,7 @@ class 
KNXNotificationService(BaseNotificationService): async def async_send_message(self, message: str = "", **kwargs: Any) -> None: """Send a notification to knx bus.""" + migrate_notify_issue(self.hass) if "target" in kwargs: await self._async_send_to_device(message, kwargs["target"]) else: @@ -74,3 +73,41 @@ class KNXNotificationService(BaseNotificationService): for device in self.devices: if device.name in names: await device.set(message) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: config_entries.ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up notify(s) for KNX platform.""" + xknx: XKNX = hass.data[DOMAIN].xknx + config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] + + async_add_entities(KNXNotify(xknx, entity_config) for entity_config in config) + + +def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotification: + """Return a KNX Notification to be used within XKNX.""" + return XknxNotification( + xknx, + name=config[CONF_NAME], + group_address=config[KNX_ADDRESS], + value_type=config[CONF_TYPE], + ) + + +class KNXNotify(NotifyEntity, KnxEntity): + """Representation of a KNX notification entity.""" + + _device: XknxNotification + + def __init__(self, xknx: XKNX, config: ConfigType) -> None: + """Initialize a KNX notification.""" + super().__init__(_create_notification_instance(xknx, config)) + self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) + self._attr_unique_id = str(self._device.remote_value.group_address) + + async def async_send_message(self, message: str) -> None: + """Send a notification to knx bus.""" + await self._device.set(message) diff --git a/homeassistant/components/knx/repairs.py b/homeassistant/components/knx/repairs.py new file mode 100644 index 00000000000..f0a92850d36 --- /dev/null +++ b/homeassistant/components/knx/repairs.py @@ -0,0 +1,36 @@ +"""Repairs support for KNX.""" + +from __future__ import annotations + +from 
homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import issue_registry as ir + +from .const import DOMAIN + + +@callback +def migrate_notify_issue(hass: HomeAssistant) -> None: + """Create issue for notify service deprecation.""" + ir.async_create_issue( + hass, + DOMAIN, + "migrate_notify", + breaks_in_ha_version="2024.11.0", + issue_domain=Platform.NOTIFY.value, + is_fixable=True, + is_persistent=True, + translation_key="migrate_notify", + severity=ir.IssueSeverity.WARNING, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert issue_id == "migrate_notify" + return ConfirmRepairFlow() diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index 39670b4f92b..462605c3985 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -750,6 +750,7 @@ class NotifySchema(KNXPlatformSchema): vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator, vol.Required(KNX_ADDRESS): ga_validator, + vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, } ) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 39b96dddf8f..a69ba106ffd 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -384,5 +384,18 @@ "name": "[%key:common::action::reload%]", "description": "Reloads the KNX integration." } + }, + "issues": { + "migrate_notify": { + "title": "Migration of KNX notify service", + "fix_flow": { + "step": { + "confirm": { + "description": "The KNX `notify` service has been migrated. 
New `notify` entities are available now.\n\nUpdate any automations to use the new `notify.send_message` exposed by these new entities. When this is done, fix this issue and restart Home Assistant.", + "title": "Disable legacy KNX notify service" + } + } + } + } } } diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 7901b0bb3fa..412fe9ee3ce 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -147,7 +147,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): raise ConfigEntryAuthFailed(msg) from ex except RequestNotSuccessful as ex: _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed("Querying API failed. Error: %s" % ex) from ex + raise UpdateFailed(f"Querying API failed. Error: {ex}") from ex def async_get_ble_device(self) -> BLEDevice | None: """Get a Bleak Client for the machine.""" diff --git a/homeassistant/components/linear_garage_door/__init__.py b/homeassistant/components/linear_garage_door/__init__.py index e21d8eaba58..16e743e00b5 100644 --- a/homeassistant/components/linear_garage_door/__init__.py +++ b/homeassistant/components/linear_garage_door/__init__.py @@ -15,7 +15,7 @@ PLATFORMS: list[Platform] = [Platform.COVER] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Linear Garage Door from a config entry.""" - coordinator = LinearUpdateCoordinator(hass, entry) + coordinator = LinearUpdateCoordinator(hass) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/linear_garage_door/coordinator.py b/homeassistant/components/linear_garage_door/coordinator.py index b771b552b62..91ff0165163 100644 --- a/homeassistant/components/linear_garage_door/coordinator.py +++ b/homeassistant/components/linear_garage_door/coordinator.py @@ -2,9 +2,11 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable +from 
dataclasses import dataclass from datetime import timedelta import logging -from typing import Any +from typing import Any, TypeVar from linear_garage_door import Linear from linear_garage_door.errors import InvalidLoginError @@ -17,46 +19,58 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator _LOGGER = logging.getLogger(__name__) +_T = TypeVar("_T") -class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + +@dataclass +class LinearDevice: + """Linear device dataclass.""" + + name: str + subdevices: dict[str, dict[str, str]] + + +class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, LinearDevice]]): """DataUpdateCoordinator for Linear.""" - _email: str - _password: str - _device_id: str - _site_id: str - _devices: list[dict[str, list[str] | str]] | None - _linear: Linear + _devices: list[dict[str, Any]] | None = None + config_entry: ConfigEntry - def __init__( - self, - hass: HomeAssistant, - entry: ConfigEntry, - ) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize DataUpdateCoordinator for Linear.""" - self._email = entry.data["email"] - self._password = entry.data["password"] - self._device_id = entry.data["device_id"] - self._site_id = entry.data["site_id"] - self._devices = None - super().__init__( hass, _LOGGER, name="Linear Garage Door", update_interval=timedelta(seconds=60), ) + self.site_id = self.config_entry.data["site_id"] - async def _async_update_data(self) -> dict[str, Any]: + async def _async_update_data(self) -> dict[str, LinearDevice]: """Get the data for Linear.""" - linear = Linear() + async def update_data(linear: Linear) -> dict[str, Any]: + if not self._devices: + self._devices = await linear.get_devices(self.site_id) + data = {} + + for device in self._devices: + device_id = str(device["id"]) + state = await linear.get_device_state(device_id) + data[device_id] = LinearDevice(device["name"], state) + return data + + return await self.execute(update_data) + + async def 
execute(self, func: Callable[[Linear], Awaitable[_T]]) -> _T: + """Execute an API call.""" + linear = Linear() try: await linear.login( - email=self._email, - password=self._password, - device_id=self._device_id, + email=self.config_entry.data["email"], + password=self.config_entry.data["password"], + device_id=self.config_entry.data["device_id"], client_session=async_get_clientsession(self.hass), ) except InvalidLoginError as err: @@ -66,17 +80,6 @@ class LinearUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ): raise ConfigEntryAuthFailed from err raise ConfigEntryNotReady from err - - if not self._devices: - self._devices = await linear.get_devices(self._site_id) - - data = {} - - for device in self._devices: - device_id = str(device["id"]) - state = await linear.get_device_state(device_id) - data[device_id] = {"name": device["name"], "subdevices": state} - + result = await func(linear) await linear.close() - - return data + return result diff --git a/homeassistant/components/linear_garage_door/cover.py b/homeassistant/components/linear_garage_door/cover.py index 3474e9d3acb..b3d720e531a 100644 --- a/homeassistant/components/linear_garage_door/cover.py +++ b/homeassistant/components/linear_garage_door/cover.py @@ -3,8 +3,6 @@ from datetime import timedelta from typing import Any -from linear_garage_door import Linear - from homeassistant.components.cover import ( CoverDeviceClass, CoverEntity, @@ -12,13 +10,12 @@ from homeassistant.components.cover import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import LinearUpdateCoordinator +from .coordinator import LinearDevice, 
LinearUpdateCoordinator SUPPORTED_SUBDEVICES = ["GDO"] PARALLEL_UPDATES = 1 @@ -32,118 +29,89 @@ async def async_setup_entry( ) -> None: """Set up Linear Garage Door cover.""" coordinator: LinearUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - data = coordinator.data - device_list: list[LinearCoverEntity] = [] - - for device_id in data: - device_list.extend( - LinearCoverEntity( - device_id=device_id, - device_name=data[device_id]["name"], - subdevice=subdev, - config_entry=config_entry, - coordinator=coordinator, - ) - for subdev in data[device_id]["subdevices"] - if subdev in SUPPORTED_SUBDEVICES - ) - async_add_entities(device_list) + async_add_entities( + LinearCoverEntity(coordinator, device_id, sub_device_id) + for device_id, device_data in coordinator.data.items() + for sub_device_id in device_data.subdevices + if sub_device_id in SUPPORTED_SUBDEVICES + ) class LinearCoverEntity(CoordinatorEntity[LinearUpdateCoordinator], CoverEntity): """Representation of a Linear cover.""" _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + _attr_has_entity_name = True + _attr_name = None + _attr_device_class = CoverDeviceClass.GARAGE def __init__( self, - device_id: str, - device_name: str, - subdevice: str, - config_entry: ConfigEntry, coordinator: LinearUpdateCoordinator, + device_id: str, + sub_device_id: str, ) -> None: """Init with device ID and name.""" super().__init__(coordinator) - - self._attr_has_entity_name = True - self._attr_name = None self._device_id = device_id - self._device_name = device_name - self._subdevice = subdevice - self._attr_device_class = CoverDeviceClass.GARAGE - self._attr_unique_id = f"{device_id}-{subdevice}" - self._config_entry = config_entry - - def _get_data(self, data_property: str) -> str: - """Get a property of the subdevice.""" - return str( - self.coordinator.data[self._device_id]["subdevices"][self._subdevice].get( - data_property - ) - ) - - @property - def device_info(self) -> 
DeviceInfo: - """Return device info of a garage door.""" - return DeviceInfo( - identifiers={(DOMAIN, self._device_id)}, - name=self._device_name, + self._sub_device_id = sub_device_id + self._attr_unique_id = f"{device_id}-{sub_device_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, sub_device_id)}, + name=self.linear_device.name, manufacturer="Linear", model="Garage Door Opener", ) + @property + def linear_device(self) -> LinearDevice: + """Return the Linear device.""" + return self.coordinator.data[self._device_id] + + @property + def sub_device(self) -> dict[str, str]: + """Return the subdevice.""" + return self.linear_device.subdevices[self._sub_device_id] + @property def is_closed(self) -> bool: """Return if cover is closed.""" - return bool(self._get_data("Open_B") == "false") + return self.sub_device.get("Open_B") == "false" @property def is_opened(self) -> bool: """Return if cover is open.""" - return bool(self._get_data("Open_B") == "true") + return self.sub_device.get("Open_B") == "true" @property def is_opening(self) -> bool: """Return if cover is opening.""" - return bool(self._get_data("Opening_P") == "0") + return self.sub_device.get("Opening_P") == "0" @property def is_closing(self) -> bool: """Return if cover is closing.""" - return bool(self._get_data("Opening_P") == "100") + return self.sub_device.get("Opening_P") == "100" async def async_close_cover(self, **kwargs: Any) -> None: """Close the garage door.""" if self.is_closed: return - linear = Linear() - - await linear.login( - email=self._config_entry.data["email"], - password=self._config_entry.data["password"], - device_id=self._config_entry.data["device_id"], - client_session=async_get_clientsession(self.hass), + await self.coordinator.execute( + lambda linear: linear.operate_device( + self._device_id, self._sub_device_id, "Close" + ) ) - await linear.operate_device(self._device_id, self._subdevice, "Close") - await linear.close() - async def async_open_cover(self, 
**kwargs: Any) -> None: """Open the garage door.""" if self.is_opened: return - linear = Linear() - - await linear.login( - email=self._config_entry.data["email"], - password=self._config_entry.data["password"], - device_id=self._config_entry.data["device_id"], - client_session=async_get_clientsession(self.hass), + await self.coordinator.execute( + lambda linear: linear.operate_device( + self._device_id, self._sub_device_id, "Open" + ) ) - - await linear.operate_device(self._device_id, self._subdevice, "Open") - await linear.close() diff --git a/homeassistant/components/linear_garage_door/diagnostics.py b/homeassistant/components/linear_garage_door/diagnostics.py index fc4906daa77..21414f02f87 100644 --- a/homeassistant/components/linear_garage_door/diagnostics.py +++ b/homeassistant/components/linear_garage_door/diagnostics.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data @@ -23,5 +24,8 @@ async def async_get_config_entry_diagnostics( return { "entry": async_redact_data(entry.as_dict(), TO_REDACT), - "coordinator_data": coordinator.data, + "coordinator_data": { + device_id: asdict(device_data) + for device_id, device_data in coordinator.data.items() + }, } diff --git a/homeassistant/components/litterrobot/manifest.json b/homeassistant/components/litterrobot/manifest.json index 66ade5f356c..88396f9f9c1 100644 --- a/homeassistant/components/litterrobot/manifest.json +++ b/homeassistant/components/litterrobot/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["pylitterbot"], - "requirements": ["pylitterbot==2023.4.11"] + "requirements": ["pylitterbot==2023.5.0"] } diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 1c13970503d..b1c7d6a3a34 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ 
b/homeassistant/components/local_calendar/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 3bcb8af9f43..44c76a56a8f 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": ["ical==7.0.3"] + "requirements": ["ical==8.0.0"] } diff --git a/homeassistant/components/lock/icons.json b/homeassistant/components/lock/icons.json index 1bf48f2ab40..0ce2e70d372 100644 --- a/homeassistant/components/lock/icons.json +++ b/homeassistant/components/lock/icons.json @@ -5,7 +5,7 @@ "state": { "jammed": "mdi:lock-alert", "locking": "mdi:lock-clock", - "unlocked": "mdi:lock-open", + "unlocked": "mdi:lock-open-variant", "unlocking": "mdi:lock-clock" } } @@ -13,6 +13,6 @@ "services": { "lock": "mdi:lock", "open": "mdi:door-open", - "unlock": "mdi:lock-open" + "unlock": "mdi:lock-open-variant" } } diff --git a/homeassistant/components/lutron/__init__.py b/homeassistant/components/lutron/__init__.py index 517eb4c8350..828182547c2 100644 --- a/homeassistant/components/lutron/__init__.py +++ b/homeassistant/components/lutron/__init__.py @@ -3,31 +3,25 @@ from dataclasses import dataclass import logging -from pylutron import Button, Keypad, Led, Lutron, LutronEvent, OccupancyGroup, Output +from pylutron import Button, Keypad, Led, Lutron, OccupancyGroup, Output import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ID, - CONF_HOST, - CONF_PASSWORD, - CONF_USERNAME, - 
Platform, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from homeassistant.util import slugify from .const import DOMAIN PLATFORMS = [ Platform.BINARY_SENSOR, Platform.COVER, + Platform.EVENT, Platform.FAN, Platform.LIGHT, Platform.SCENE, @@ -105,69 +99,13 @@ async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: return True -class LutronButton: - """Representation of a button on a Lutron keypad. - - This is responsible for firing events as keypad buttons are pressed - (and possibly released, depending on the button type). It is not - represented as an entity; it simply fires events. 
- """ - - def __init__( - self, hass: HomeAssistant, area_name: str, keypad: Keypad, button: Button - ) -> None: - """Register callback for activity on the button.""" - name = f"{keypad.name}: {button.name}" - if button.name == "Unknown Button": - name += f" {button.number}" - self._hass = hass - self._has_release_event = ( - button.button_type is not None and "RaiseLower" in button.button_type - ) - self._id = slugify(name) - self._keypad = keypad - self._area_name = area_name - self._button_name = button.name - self._button = button - self._event = "lutron_event" - self._full_id = slugify(f"{area_name} {name}") - self._uuid = button.uuid - - button.subscribe(self.button_callback, None) - - def button_callback( - self, _button: Button, _context: None, event: LutronEvent, _params: dict - ) -> None: - """Fire an event about a button being pressed or released.""" - # Events per button type: - # RaiseLower -> pressed/released - # SingleAction -> single - action = None - if self._has_release_event: - if event == Button.Event.PRESSED: - action = "pressed" - else: - action = "released" - elif event == Button.Event.PRESSED: - action = "single" - - if action: - data = { - ATTR_ID: self._id, - ATTR_ACTION: action, - ATTR_FULL_ID: self._full_id, - ATTR_UUID: self._uuid, - } - self._hass.bus.fire(self._event, data) - - @dataclass(slots=True, kw_only=True) class LutronData: """Storage class for platform global data.""" client: Lutron binary_sensors: list[tuple[str, OccupancyGroup]] - buttons: list[LutronButton] + buttons: list[tuple[str, Keypad, Button]] covers: list[tuple[str, Output]] fans: list[tuple[str, Output]] lights: list[tuple[str, Output]] @@ -273,8 +211,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b led.legacy_uuid, entry_data.client.guid, ) - - entry_data.buttons.append(LutronButton(hass, area.name, keypad, button)) + if button.button_type: + entry_data.buttons.append((area.name, keypad, button)) if area.occupancy_group is not 
None: entry_data.binary_sensors.append((area.name, area.occupancy_group)) platform = Platform.BINARY_SENSOR diff --git a/homeassistant/components/lutron/event.py b/homeassistant/components/lutron/event.py new file mode 100644 index 00000000000..710f942a006 --- /dev/null +++ b/homeassistant/components/lutron/event.py @@ -0,0 +1,109 @@ +"""Support for Lutron events.""" + +from enum import StrEnum + +from pylutron import Button, Keypad, Lutron, LutronEvent + +from homeassistant.components.event import EventEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_ID +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify + +from . import ATTR_ACTION, ATTR_FULL_ID, ATTR_UUID, DOMAIN, LutronData +from .entity import LutronKeypad + + +class LutronEventType(StrEnum): + """Lutron event types.""" + + SINGLE_PRESS = "single_press" + PRESS = "press" + RELEASE = "release" + + +LEGACY_EVENT_TYPES: dict[LutronEventType, str] = { + LutronEventType.SINGLE_PRESS: "single", + LutronEventType.PRESS: "pressed", + LutronEventType.RELEASE: "released", +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Lutron event platform.""" + entry_data: LutronData = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + LutronEventEntity(area_name, keypad, button, entry_data.client) + for area_name, keypad, button in entry_data.buttons + ) + + +class LutronEventEntity(LutronKeypad, EventEntity): + """Representation of a Lutron keypad button.""" + + _attr_translation_key = "button" + + def __init__( + self, + area_name: str, + keypad: Keypad, + button: Button, + controller: Lutron, + ) -> None: + """Initialize the button.""" + super().__init__(area_name, button, controller, keypad) + if (name := button.name) == "Unknown Button": + 
name += f" {button.number}" + self._attr_name = name + self._has_release_event = ( + button.button_type is not None and "RaiseLower" in button.button_type + ) + if self._has_release_event: + self._attr_event_types = [LutronEventType.PRESS, LutronEventType.RELEASE] + else: + self._attr_event_types = [LutronEventType.SINGLE_PRESS] + + self._full_id = slugify(f"{area_name} {name}") + self._id = slugify(name) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + self._lutron_device.subscribe(self.handle_event, None) + + async def async_will_remove_from_hass(self) -> None: + """Unregister callbacks.""" + await super().async_will_remove_from_hass() + # Temporary solution until https://github.com/thecynic/pylutron/pull/93 gets merged + self._lutron_device._subscribers.remove((self.handle_event, None)) # pylint: disable=protected-access + + @callback + def handle_event( + self, button: Button, _context: None, event: LutronEvent, _params: dict + ) -> None: + """Handle received event.""" + action: LutronEventType | None = None + if self._has_release_event: + if event == Button.Event.PRESSED: + action = LutronEventType.PRESS + else: + action = LutronEventType.RELEASE + elif event == Button.Event.PRESSED: + action = LutronEventType.SINGLE_PRESS + + if action: + data = { + ATTR_ID: self._id, + ATTR_ACTION: LEGACY_EVENT_TYPES[action], + ATTR_FULL_ID: self._full_id, + ATTR_UUID: button.uuid, + } + self.hass.bus.fire("lutron_event", data) + self._trigger_event(action) + self.async_write_ha_state() diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index efa0a35d81a..0212c8845d5 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -22,6 +22,21 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, + "entity": { + "event": { + "button": { + "state_attributes": { + 
"event_type": { + "state": { + "single_press": "Single press", + "press": "Press", + "release": "Release" + } + } + } + } + } + }, "issues": { "deprecated_yaml_import_issue_cannot_connect": { "title": "The Lutron YAML configuration import cannot connect to server", diff --git a/homeassistant/components/media_extractor/__init__.py b/homeassistant/components/media_extractor/__init__.py index 228a012a04f..56b768c26a2 100644 --- a/homeassistant/components/media_extractor/__init__.py +++ b/homeassistant/components/media_extractor/__init__.py @@ -55,7 +55,7 @@ CONFIG_SCHEMA = vol.Schema( ) -def setup(hass: HomeAssistant, config: ConfigType) -> bool: +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the media extractor service.""" async def extract_media_url(call: ServiceCall) -> ServiceResponse: @@ -114,7 +114,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: supports_response=SupportsResponse.ONLY, ) - hass.services.register( + hass.services.async_register( DOMAIN, SERVICE_PLAY_MEDIA, play_media, @@ -278,9 +278,9 @@ def get_best_stream_youtube(formats: list[dict[str, Any]]) -> str: return get_best_stream( [ - format - for format in formats - if format.get("acodec", "none") != "none" - and format.get("vcodec", "none") != "none" + stream_format + for stream_format in formats + if stream_format.get("acodec", "none") != "none" + and stream_format.get("vcodec", "none") != "none" ] ) diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 0d1848e0d8e..bd7eed8235c 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -34,6 +34,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.issue_registry import 
IssueSeverity, async_create_issue from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType @@ -234,6 +235,18 @@ async def async_modbus_setup( async def async_restart_hub(service: ServiceCall) -> None: """Restart Modbus hub.""" + async_create_issue( + hass, + DOMAIN, + "deprecated_restart", + breaks_in_ha_version="2024.11.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_restart", + ) + _LOGGER.warning( + "`modbus.restart`: is deprecated and will be removed in version 2024.11" + ) async_dispatcher_send(hass, SIGNAL_START_ENTITY) hub = hub_collect[service.data[ATTR_HUB]] await hub.async_restart() diff --git a/homeassistant/components/modbus/strings.json b/homeassistant/components/modbus/strings.json index 72d7a3ec5f1..f89f9a97d52 100644 --- a/homeassistant/components/modbus/strings.json +++ b/homeassistant/components/modbus/strings.json @@ -97,6 +97,10 @@ "no_entities": { "title": "Modbus {sub_1} contain no entities, entry not loaded.", "description": "Please add at least one entity to Modbus {sub_1} in your configuration.yaml file and restart Home Assistant to fix this issue." + }, + "deprecated_restart": { + "title": "`modbus.restart` is being removed", + "description": "Please use reload yaml via the developer tools in the UI instead of via the `modbus.restart` service." 
} } } diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 28cb7d0944b..cc1ae3ddce1 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -265,7 +265,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: conf: dict[str, Any] mqtt_data: MqttData - async def _setup_client() -> tuple[MqttData, dict[str, Any]]: + async def _setup_client( + client_available: asyncio.Future[bool], + ) -> tuple[MqttData, dict[str, Any]]: """Set up the MQTT client.""" # Fetch configuration conf = dict(entry.data) @@ -294,7 +296,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.add_update_listener(_async_config_entry_updated) ) - await mqtt_data.client.async_connect() + await mqtt_data.client.async_connect(client_available) return (mqtt_data, conf) client_available: asyncio.Future[bool] @@ -303,13 +305,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: else: client_available = hass.data[DATA_MQTT_AVAILABLE] - setup_ok: bool = False - try: - mqtt_data, conf = await _setup_client() - setup_ok = True - finally: - if not client_available.done(): - client_available.set_result(setup_ok) + mqtt_data, conf = await _setup_client(client_available) async def async_publish_service(call: ServiceCall) -> None: """Handle MQTT publish service calls.""" diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 978123e169c..f01b8e80b3d 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -3,12 +3,14 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Iterable +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +import contextlib from dataclasses import dataclass -from functools import lru_cache +from functools import lru_cache, partial from 
itertools import chain, groupby import logging from operator import attrgetter +import socket import ssl import time from typing import TYPE_CHECKING, Any @@ -35,10 +37,10 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import dispatcher_send +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from homeassistant.loader import bind_hass -from homeassistant.util import dt as dt_util +from homeassistant.util.async_ import create_eager_task from homeassistant.util.logging import catch_log_exception from .const import ( @@ -92,6 +94,9 @@ INITIAL_SUBSCRIBE_COOLDOWN = 1.0 SUBSCRIBE_COOLDOWN = 0.1 UNSUBSCRIBE_COOLDOWN = 0.1 TIMEOUT_ACK = 10 +RECONNECT_INTERVAL_SECONDS = 10 + +SocketType = socket.socket | ssl.SSLSocket | Any SubscribePayloadType = str | bytes # Only bytes if encoding is None @@ -258,7 +263,9 @@ class MqttClientSetup: # However, that feature is not mandatory so we generate our own. 
client_id = mqtt.base62(uuid.uuid4().int, padding=22) transport = config.get(CONF_TRANSPORT, DEFAULT_TRANSPORT) - self._client = mqtt.Client(client_id, protocol=proto, transport=transport) + self._client = mqtt.Client( + client_id, protocol=proto, transport=transport, reconnect_on_failure=False + ) # Enable logging self._client.enable_logger() @@ -345,7 +352,7 @@ class EnsureJobAfterCooldown: return self._async_cancel_timer() - self._task = asyncio.create_task(self._async_job()) + self._task = create_eager_task(self._async_job()) self._task.add_done_callback(self._async_task_done) @callback @@ -404,12 +411,17 @@ class MQTT: self._ha_started = asyncio.Event() self._cleanup_on_unload: list[Callable[[], None]] = [] - self._paho_lock = asyncio.Lock() # Prevents parallel calls to the MQTT client + self._connection_lock = asyncio.Lock() self._pending_operations: dict[int, asyncio.Event] = {} self._pending_operations_condition = asyncio.Condition() self._subscribe_debouncer = EnsureJobAfterCooldown( INITIAL_SUBSCRIBE_COOLDOWN, self._async_perform_subscriptions ) + self._misc_task: asyncio.Task | None = None + self._reconnect_task: asyncio.Task | None = None + self._should_reconnect: bool = True + self._available_future: asyncio.Future[bool] | None = None + self._max_qos: dict[str, int] = {} # topic, max qos self._pending_subscriptions: dict[str, int] = {} # topic, qos self._unsubscribe_debouncer = EnsureJobAfterCooldown( @@ -456,25 +468,140 @@ class MQTT: while self._cleanup_on_unload: self._cleanup_on_unload.pop()() + @contextlib.asynccontextmanager + async def _async_connect_in_executor(self) -> AsyncGenerator[None, None]: + # While we are connecting in the executor we need to + # handle on_socket_open and on_socket_register_write + # in the executor as well. 
+ mqttc = self._mqttc + try: + mqttc.on_socket_open = self._on_socket_open + mqttc.on_socket_register_write = self._on_socket_register_write + yield + finally: + # Once the executor job is done, we can switch back to + # handling these in the event loop. + mqttc.on_socket_open = self._async_on_socket_open + mqttc.on_socket_register_write = self._async_on_socket_register_write + def init_client(self) -> None: """Initialize paho client.""" - self._mqttc = MqttClientSetup(self.conf).client - self._mqttc.on_connect = self._mqtt_on_connect - self._mqttc.on_disconnect = self._mqtt_on_disconnect - self._mqttc.on_message = self._mqtt_on_message - self._mqttc.on_publish = self._mqtt_on_callback - self._mqttc.on_subscribe = self._mqtt_on_callback - self._mqttc.on_unsubscribe = self._mqtt_on_callback + mqttc = MqttClientSetup(self.conf).client + # on_socket_unregister_write and _async_on_socket_close + # are only ever called in the event loop + mqttc.on_socket_close = self._async_on_socket_close + mqttc.on_socket_unregister_write = self._async_on_socket_unregister_write + + # These will be called in the event loop + mqttc.on_connect = self._async_mqtt_on_connect + mqttc.on_disconnect = self._async_mqtt_on_disconnect + mqttc.on_message = self._async_mqtt_on_message + mqttc.on_publish = self._async_mqtt_on_callback + mqttc.on_subscribe = self._async_mqtt_on_callback + mqttc.on_unsubscribe = self._async_mqtt_on_callback if will := self.conf.get(CONF_WILL_MESSAGE, DEFAULT_WILL): will_message = PublishMessage(**will) - self._mqttc.will_set( + mqttc.will_set( topic=will_message.topic, payload=will_message.payload, qos=will_message.qos, retain=will_message.retain, ) + self._mqttc = mqttc + + async def _misc_loop(self) -> None: + """Start the MQTT client misc loop.""" + # pylint: disable=import-outside-toplevel + import paho.mqtt.client as mqtt + + while self._mqttc.loop_misc() == mqtt.MQTT_ERR_SUCCESS: + await asyncio.sleep(1) + + @callback + def _async_reader_callback(self, client: 
mqtt.Client) -> None: + """Handle reading data from the socket.""" + if (status := client.loop_read()) != 0: + self._async_on_disconnect(status) + + @callback + def _async_start_misc_loop(self) -> None: + """Start the misc loop.""" + if self._misc_task is None or self._misc_task.done(): + _LOGGER.debug("%s: Starting client misc loop", self.config_entry.title) + self._misc_task = self.config_entry.async_create_background_task( + self.hass, self._misc_loop(), name="mqtt misc loop" + ) + + def _on_socket_open( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket open.""" + self.loop.call_soon_threadsafe( + self._async_on_socket_open, client, userdata, sock + ) + + @callback + def _async_on_socket_open( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket open.""" + fileno = sock.fileno() + _LOGGER.debug("%s: connection opened %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.add_reader(sock, partial(self._async_reader_callback, client)) + self._async_start_misc_loop() + + @callback + def _async_on_socket_close( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Handle socket close.""" + fileno = sock.fileno() + _LOGGER.debug("%s: connection closed %s", self.config_entry.title, fileno) + # If socket close is called before the connect + # result is set make sure the first connection result is set + self._async_connection_result(False) + if fileno > -1: + self.loop.remove_reader(sock) + if self._misc_task is not None and not self._misc_task.done(): + self._misc_task.cancel() + + @callback + def _async_writer_callback(self, client: mqtt.Client) -> None: + """Handle writing data to the socket.""" + if (status := client.loop_write()) != 0: + self._async_on_disconnect(status) + + def _on_socket_register_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Register the socket for writing.""" + 
self.loop.call_soon_threadsafe( + self._async_on_socket_register_write, client, None, sock + ) + + @callback + def _async_on_socket_register_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Register the socket for writing.""" + fileno = sock.fileno() + _LOGGER.debug("%s: register write %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.add_writer(sock, partial(self._async_writer_callback, client)) + + @callback + def _async_on_socket_unregister_write( + self, client: mqtt.Client, userdata: Any, sock: SocketType + ) -> None: + """Unregister the socket for writing.""" + fileno = sock.fileno() + _LOGGER.debug("%s: unregister write %s", self.config_entry.title, fileno) + if fileno > -1: + self.loop.remove_writer(sock) + def _is_active_subscription(self, topic: str) -> bool: """Check if a topic has an active subscription.""" return topic in self._simple_subscriptions or any( @@ -485,10 +612,7 @@ class MQTT: self, topic: str, payload: PublishPayloadType, qos: int, retain: bool ) -> None: """Publish a MQTT message.""" - async with self._paho_lock: - msg_info = await self.hass.async_add_executor_job( - self._mqttc.publish, topic, payload, qos, retain - ) + msg_info = self._mqttc.publish(topic, payload, qos, retain) _LOGGER.debug( "Transmitting%s message on %s: '%s', mid: %s, qos: %s", " retained" if retain else "", @@ -500,37 +624,71 @@ class MQTT: _raise_on_error(msg_info.rc) await self._wait_for_mid(msg_info.mid) - async def async_connect(self) -> None: + async def async_connect(self, client_available: asyncio.Future[bool]) -> None: """Connect to the host. 
Does not process messages yet.""" # pylint: disable-next=import-outside-toplevel import paho.mqtt.client as mqtt result: int | None = None + self._available_future = client_available + self._should_reconnect = True try: - result = await self.hass.async_add_executor_job( - self._mqttc.connect, - self.conf[CONF_BROKER], - self.conf.get(CONF_PORT, DEFAULT_PORT), - self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), - ) + async with self._connection_lock, self._async_connect_in_executor(): + result = await self.hass.async_add_executor_job( + self._mqttc.connect, + self.conf[CONF_BROKER], + self.conf.get(CONF_PORT, DEFAULT_PORT), + self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), + ) except OSError as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) + self._async_connection_result(False) + finally: + if result is not None and result != 0: + if result is not None: + _LOGGER.error( + "Failed to connect to MQTT server: %s", + mqtt.error_string(result), + ) + self._async_connection_result(False) - if result is not None and result != 0: - _LOGGER.error( - "Failed to connect to MQTT server: %s", mqtt.error_string(result) + @callback + def _async_connection_result(self, connected: bool) -> None: + """Handle a connection result.""" + if self._available_future and not self._available_future.done(): + self._available_future.set_result(connected) + + if connected: + self._async_cancel_reconnect() + elif self._should_reconnect and not self._reconnect_task: + self._reconnect_task = self.config_entry.async_create_background_task( + self.hass, self._reconnect_loop(), "mqtt reconnect loop" ) - self._mqttc.loop_start() + @callback + def _async_cancel_reconnect(self) -> None: + """Cancel the reconnect task.""" + if self._reconnect_task: + self._reconnect_task.cancel() + self._reconnect_task = None + + async def _reconnect_loop(self) -> None: + """Reconnect to the MQTT server.""" + while True: + if not self.connected: + try: + async with 
self._connection_lock, self._async_connect_in_executor(): + await self.hass.async_add_executor_job(self._mqttc.reconnect) + except OSError as err: + _LOGGER.debug( + "Error re-connecting to MQTT server due to exception: %s", err + ) + + await asyncio.sleep(RECONNECT_INTERVAL_SECONDS) async def async_disconnect(self) -> None: """Stop the MQTT client.""" - def stop() -> None: - """Stop the MQTT client.""" - # Do not disconnect, we want the broker to always publish will - self._mqttc.loop_stop() - def no_more_acks() -> bool: """Return False if there are unprocessed ACKs.""" return not any(not op.is_set() for op in self._pending_operations.values()) @@ -549,8 +707,10 @@ class MQTT: await self._pending_operations_condition.wait_for(no_more_acks) # stop the MQTT loop - async with self._paho_lock: - await self.hass.async_add_executor_job(stop) + async with self._connection_lock: + self._should_reconnect = False + self._async_cancel_reconnect() + self._mqttc.disconnect() @callback def async_restore_tracked_subscriptions( @@ -689,11 +849,8 @@ class MQTT: subscriptions: dict[str, int] = self._pending_subscriptions self._pending_subscriptions = {} - async with self._paho_lock: - subscription_list = list(subscriptions.items()) - result, mid = await self.hass.async_add_executor_job( - self._mqttc.subscribe, subscription_list - ) + subscription_list = list(subscriptions.items()) + result, mid = self._mqttc.subscribe(subscription_list) for topic, qos in subscriptions.items(): _LOGGER.debug("Subscribing to %s, mid: %s, qos: %s", topic, mid, qos) @@ -712,17 +869,15 @@ class MQTT: topics = list(self._pending_unsubscribes) self._pending_unsubscribes = set() - async with self._paho_lock: - result, mid = await self.hass.async_add_executor_job( - self._mqttc.unsubscribe, topics - ) + result, mid = self._mqttc.unsubscribe(topics) _raise_on_error(result) for topic in topics: _LOGGER.debug("Unsubscribing from %s, mid: %s", topic, mid) await self._wait_for_mid(mid) - def _mqtt_on_connect( + 
@callback + def _async_mqtt_on_connect( self, _mqttc: mqtt.Client, _userdata: None, @@ -739,14 +894,22 @@ class MQTT: import paho.mqtt.client as mqtt if result_code != mqtt.CONNACK_ACCEPTED: + if result_code in ( + mqtt.CONNACK_REFUSED_BAD_USERNAME_PASSWORD, + mqtt.CONNACK_REFUSED_NOT_AUTHORIZED, + ): + self._should_reconnect = False + self.hass.async_create_task(self.async_disconnect()) + self.config_entry.async_start_reauth(self.hass) _LOGGER.error( "Unable to connect to the MQTT broker: %s", mqtt.connack_string(result_code), ) + self._async_connection_result(False) return self.connected = True - dispatcher_send(self.hass, MQTT_CONNECTED) + async_dispatcher_send(self.hass, MQTT_CONNECTED) _LOGGER.info( "Connected to MQTT server %s:%s (%s)", self.conf[CONF_BROKER], @@ -754,7 +917,7 @@ class MQTT: result_code, ) - self.hass.create_task(self._async_resubscribe()) + self.hass.async_create_task(self._async_resubscribe()) if birth := self.conf.get(CONF_BIRTH_MESSAGE, DEFAULT_BIRTH): @@ -771,13 +934,17 @@ class MQTT: ) birth_message = PublishMessage(**birth) - asyncio.run_coroutine_threadsafe( - publish_birth_message(birth_message), self.hass.loop + self.config_entry.async_create_background_task( + self.hass, + publish_birth_message(birth_message), + name="mqtt birth message", ) else: # Update subscribe cooldown period to a shorter time self._subscribe_debouncer.set_timeout(SUBSCRIBE_COOLDOWN) + self._async_connection_result(True) + async def _async_resubscribe(self) -> None: """Resubscribe on reconnect.""" self._max_qos.clear() @@ -796,16 +963,6 @@ class MQTT: ) await self._async_perform_subscriptions() - def _mqtt_on_message( - self, _mqttc: mqtt.Client, _userdata: None, msg: mqtt.MQTTMessage - ) -> None: - """Message received callback.""" - # MQTT messages tend to be high volume, - # and since they come in via a thread and need to be processed in the event loop, - # we want to avoid hass.add_job since most of the time is spent calling - # inspect to figure out how to 
run the callback. - self.loop.call_soon_threadsafe(self._mqtt_handle_message, msg) - @lru_cache(None) # pylint: disable=method-cache-max-size-none def _matching_subscriptions(self, topic: str) -> list[Subscription]: subscriptions: list[Subscription] = [] @@ -819,7 +976,9 @@ class MQTT: return subscriptions @callback - def _mqtt_handle_message(self, msg: mqtt.MQTTMessage) -> None: + def _async_mqtt_on_message( + self, _mqttc: mqtt.Client, _userdata: None, msg: mqtt.MQTTMessage + ) -> None: topic = msg.topic # msg.topic is a property that decodes the topic to a string # every time it is accessed. Save the result to avoid @@ -831,8 +990,6 @@ class MQTT: msg.qos, msg.payload[0:8192], ) - timestamp = dt_util.utcnow() - subscriptions = self._matching_subscriptions(topic) msg_cache_by_subscription_topic: dict[str, ReceiveMessage] = {} @@ -870,7 +1027,7 @@ class MQTT: msg.qos, msg.retain, subscription_topic, - timestamp, + msg.timestamp, ) msg_cache_by_subscription_topic[subscription_topic] = receive_msg else: @@ -878,7 +1035,8 @@ class MQTT: self.hass.async_run_hass_job(subscription.job, receive_msg) self._mqtt_data.state_write_requests.process_write_state_requests(msg) - def _mqtt_on_callback( + @callback + def _async_mqtt_on_callback( self, _mqttc: mqtt.Client, _userdata: None, @@ -890,7 +1048,7 @@ class MQTT: # The callback signature for on_unsubscribe is different from on_subscribe # see https://github.com/eclipse/paho.mqtt.python/issues/687 # properties and reasoncodes are not used in Home Assistant - self.hass.create_task(self._mqtt_handle_mid(mid)) + self.hass.async_create_task(self._mqtt_handle_mid(mid)) async def _mqtt_handle_mid(self, mid: int) -> None: # Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid @@ -906,7 +1064,8 @@ class MQTT: if mid not in self._pending_operations: self._pending_operations[mid] = asyncio.Event() - def _mqtt_on_disconnect( + @callback + def _async_mqtt_on_disconnect( self, _mqttc: mqtt.Client, _userdata: 
None, @@ -914,8 +1073,19 @@ class MQTT: properties: mqtt.Properties | None = None, ) -> None: """Disconnected callback.""" + self._async_on_disconnect(result_code) + + @callback + def _async_on_disconnect(self, result_code: int) -> None: + if not self.connected: + # This function is re-entrant and may be called multiple times + # when there is a broken pipe error. + return + # If disconnect is called before the connect + # result is set make sure the first connection result is set + self._async_connection_result(False) self.connected = False - dispatcher_send(self.hass, MQTT_DISCONNECTED) + async_dispatcher_send(self.hass, MQTT_DISCONNECTED) _LOGGER.warning( "Disconnected from MQTT server %s:%s (%s)", self.conf[CONF_BROKER], diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 5bf0c9c1879..1a7dfbbc507 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections import OrderedDict -from collections.abc import Callable +from collections.abc import Callable, Mapping import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType @@ -158,13 +158,46 @@ CERT_UPLOAD_SELECTOR = FileSelector( ) KEY_UPLOAD_SELECTOR = FileSelector(FileSelectorConfig(accept=".key,application/pkcs8")) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): TEXT_SELECTOR, + vol.Required(CONF_PASSWORD): PASSWORD_SELECTOR, + } +) +PWD_NOT_CHANGED = "__**password_not_changed**__" + + +@callback +def update_password_from_user_input( + entry_password: str | None, user_input: dict[str, Any] +) -> dict[str, Any]: + """Update the password if the entry has been updated. + + As we want to avoid reflecting the stored password in the UI, + we replace the suggested value in the UI with a sentitel, + and we change it back here if it was changed. 
+ """ + substituted_used_data = dict(user_input) + # Take out the password submitted + user_password: str | None = substituted_used_data.pop(CONF_PASSWORD, None) + # Only add the password if it has changed. + # If the sentinel password is submitted, we replace that with our current + # password from the config entry data. + password_changed = user_password is not None and user_password != PWD_NOT_CHANGED + password = user_password if password_changed else entry_password + if password is not None: + substituted_used_data[CONF_PASSWORD] = password + return substituted_used_data + class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 + entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None + _reauth_config_entry: ConfigEntry | None = None @staticmethod @callback @@ -183,6 +216,49 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_broker() + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication with Aladdin Connect.""" + + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm re-authentication with MQTT broker.""" + errors: dict[str, str] = {} + + assert self.entry is not None + if user_input: + substituted_used_data = update_password_from_user_input( + self.entry.data.get(CONF_PASSWORD), user_input + ) + new_entry_data = {**self.entry.data, **substituted_used_data} + if await self.hass.async_add_executor_job( + try_connection, + new_entry_data, + ): + return self.async_update_reload_and_abort( + self.entry, data=new_entry_data + ) + + errors["base"] = "invalid_auth" + + schema = self.add_suggested_values_to_schema( + REAUTH_SCHEMA, + { + CONF_USERNAME: self.entry.data.get(CONF_USERNAME), + CONF_PASSWORD: PWD_NOT_CHANGED, + }, 
+ ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=schema, + errors=errors, + ) + async def async_step_broker( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -291,13 +367,17 @@ class MQTTOptionsFlowHandler(OptionsFlow): validated_user_input, errors, ): + self.broker_config.update( + update_password_from_user_input( + self.config_entry.data.get(CONF_PASSWORD), validated_user_input + ), + ) can_connect = await self.hass.async_add_executor_job( try_connection, - validated_user_input, + self.broker_config, ) if can_connect: - self.broker_config.update(validated_user_input) return await self.async_step_options() errors["base"] = "cannot_connect" @@ -598,7 +678,9 @@ async def async_get_broker_settings( current_broker = current_config.get(CONF_BROKER) current_port = current_config.get(CONF_PORT, DEFAULT_PORT) current_user = current_config.get(CONF_USERNAME) - current_pass = current_config.get(CONF_PASSWORD) + # Return the sentinel password to avoid exposure + current_entry_pass = current_config.get(CONF_PASSWORD) + current_pass = PWD_NOT_CHANGED if current_entry_pass else None # Treat the previous post as an update of the current settings # (if there was a basic broker setup step) diff --git a/homeassistant/components/mqtt/debug_info.py b/homeassistant/components/mqtt/debug_info.py index 7ff93a6bd06..e84dedde785 100644 --- a/homeassistant/components/mqtt/debug_info.py +++ b/homeassistant/components/mqtt/debug_info.py @@ -7,6 +7,7 @@ from collections.abc import Callable from dataclasses import dataclass import datetime as dt from functools import wraps +import time from typing import TYPE_CHECKING, Any from homeassistant.core import HomeAssistant @@ -57,7 +58,7 @@ class TimestampedPublishMessage: payload: PublishPayloadType qos: int retain: bool - timestamp: dt.datetime + timestamp: float def log_message( @@ -77,7 +78,7 @@ def log_message( "messages": deque([], STORED_MESSAGES), } msg = TimestampedPublishMessage( - topic, 
payload, qos, retain, timestamp=dt_util.utcnow() + topic, payload, qos, retain, timestamp=time.monotonic() ) entity_info["transmitted"][topic]["messages"].append(msg) @@ -175,6 +176,7 @@ def remove_trigger_discovery_data( def _info_for_entity(hass: HomeAssistant, entity_id: str) -> dict[str, Any]: entity_info = get_mqtt_data(hass).debug_info_entities[entity_id] + monotonic_time_diff = time.time() - time.monotonic() subscriptions = [ { "topic": topic, @@ -183,7 +185,10 @@ def _info_for_entity(hass: HomeAssistant, entity_id: str) -> dict[str, Any]: "payload": str(msg.payload), "qos": msg.qos, "retain": msg.retain, - "time": msg.timestamp, + "time": dt_util.utc_from_timestamp( + msg.timestamp + monotonic_time_diff, + tz=dt.UTC, + ), "topic": msg.topic, } for msg in subscription["messages"] @@ -199,7 +204,10 @@ def _info_for_entity(hass: HomeAssistant, entity_id: str) -> dict[str, Any]: "payload": str(msg.payload), "qos": msg.qos, "retain": msg.retain, - "time": msg.timestamp, + "time": dt_util.utc_from_timestamp( + msg.timestamp + monotonic_time_diff, + tz=dt.UTC, + ), "topic": msg.topic, } for msg in subscription["messages"] diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 3a284c6719c..34370c82507 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -1,11 +1,11 @@ { "domain": "mqtt", "name": "MQTT", - "codeowners": ["@emontnemery", "@jbouwh"], + "codeowners": ["@emontnemery", "@jbouwh", "@bdraco"], "config_flow": true, "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", "iot_class": "local_push", - "quality_scale": "gold", + "quality_scale": "platinum", "requirements": ["paho-mqtt==1.6.1"] } diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index f53643268e7..17640c3e733 100644 --- a/homeassistant/components/mqtt/models.py +++ 
b/homeassistant/components/mqtt/models.py @@ -7,7 +7,6 @@ import asyncio from collections import deque from collections.abc import Callable, Coroutine from dataclasses import dataclass, field -import datetime as dt from enum import StrEnum import logging from typing import TYPE_CHECKING, Any, TypedDict @@ -67,7 +66,7 @@ class ReceiveMessage: qos: int retain: bool subscribed_topic: str - timestamp: dt.datetime + timestamp: float AsyncMessageCallbackType = Callable[[ReceiveMessage], Coroutine[Any, Any, None]] diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 2bd47db63bc..fc5f0bc4970 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -68,10 +68,23 @@ "data_description": { "discovery": "Option to enable MQTT automatic discovery." } + }, + "reauth_confirm": { + "title": "Re-authentication required with the MQTT broker", + "description": "The MQTT broker reported an authentication error. 
Please confirm the broker's correct username and password.", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::mqtt::config::step::broker::data_description::username%]", + "password": "[%key:component::mqtt::config::step::broker::data_description::password%]" + } } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" }, "error": { @@ -84,6 +97,7 @@ "bad_client_cert_key": "Client certificate and private key are not a valid pair", "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_inclusion": "The client certificate and private key must be configurered together" } }, diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index a4ef9af9aee..7b1c584c293 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], "quality_scale": "platinum", - "requirements": ["nettigo-air-monitor==2.2.2"], + "requirements": ["nettigo-air-monitor==3.0.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/nest/device_info.py b/homeassistant/components/nest/device_info.py index f269e3e89d6..33793fe836b 100644 --- a/homeassistant/components/nest/device_info.py +++ b/homeassistant/components/nest/device_info.py @@ -73,7 +73,7 @@ class NestDeviceInfo: """Return device suggested area based on the Google Home room.""" if parent_relations := self._device.parent_relations: 
items = sorted(parent_relations.items()) - names = [name for id, name in items] + names = [name for _, name in items] return " ".join(names) return None diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index d48006c449d..6c481806e4f 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -322,7 +322,7 @@ class NestMediaSource(MediaSource): devices = async_get_media_source_devices(self.hass) if not (device := devices.get(media_id.device_id)): raise Unresolvable( - "Unable to find device with identifier: %s" % item.identifier + f"Unable to find device with identifier: {item.identifier}" ) if not media_id.event_token: # The device resolves to the most recent event if available @@ -330,7 +330,7 @@ class NestMediaSource(MediaSource): last_event_id := await _async_get_recent_event_id(media_id, device) ): raise Unresolvable( - "Unable to resolve recent event for device: %s" % item.identifier + f"Unable to resolve recent event for device: {item.identifier}" ) media_id = last_event_id @@ -377,7 +377,7 @@ class NestMediaSource(MediaSource): # Browse either a device or events within a device if not (device := devices.get(media_id.device_id)): raise BrowseError( - "Unable to find device with identiifer: %s" % item.identifier + f"Unable to find device with identifier: {item.identifier}" ) # Clip previews are a session with multiple possible event types (e.g. 
# person, motion, etc) and a single mp4 @@ -399,7 +399,7 @@ # Browse a specific event if not (single_clip := clips.get(media_id.event_token)): raise BrowseError( - "Unable to find event with identiifer: %s" % item.identifier + f"Unable to find event with identifier: {item.identifier}" ) return _browse_clip_preview(media_id, device, single_clip) @@ -419,7 +419,7 @@ # Browse a specific event if not (single_image := images.get(media_id.event_token)): raise BrowseError( - "Unable to find event with identiifer: %s" % item.identifier + f"Unable to find event with identifier: {item.identifier}" ) return _browse_image_event(media_id, device, single_image) diff --git a/homeassistant/components/netio/switch.py b/homeassistant/components/netio/switch.py index 0f0c85c1720..4cc77e44ec4 100644 --- a/homeassistant/components/netio/switch.py +++ b/homeassistant/components/netio/switch.py @@ -165,7 +165,7 @@ class NetioSwitch(SwitchEntity): def _set(self, value): val = list("uuuu") val[int(self.outlet) - 1] = "1" if value else "0" - self.netio.get("port list %s" % "".join(val)) + self.netio.get("port list {}".format("".join(val))) self.netio.states[int(self.outlet) - 1] = value self.schedule_update_ha_state() diff --git a/homeassistant/components/nextdns/__init__.py b/homeassistant/components/nextdns/__init__.py index 389173a2694..c7e4a0842fb 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -4,31 +4,15 @@ from __future__ import annotations import asyncio from datetime import timedelta -import logging -from typing import TypeVar from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ( - AnalyticsDnssec, - AnalyticsEncryption, - AnalyticsIpVersions, - AnalyticsProtocols, - AnalyticsStatus, - ApiError, - ConnectionStatus, - InvalidApiKeyError, - NextDns, - Settings, -) -from nextdns.model import NextDnsData +from nextdns import 
ApiError, NextDns from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import ( ATTR_CONNECTION, @@ -44,104 +28,16 @@ from .const import ( UPDATE_INTERVAL_CONNECTION, UPDATE_INTERVAL_SETTINGS, ) - -CoordinatorDataT = TypeVar("CoordinatorDataT", bound=NextDnsData) - - -class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS data API.""" - - def __init__( - self, - hass: HomeAssistant, - nextdns: NextDns, - profile_id: str, - update_interval: timedelta, - ) -> None: - """Initialize.""" - self.nextdns = nextdns - self.profile_id = profile_id - self.profile_name = nextdns.get_profile_name(profile_id) - self.device_info = DeviceInfo( - configuration_url=f"https://my.nextdns.io/{profile_id}/setup", - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, str(profile_id))}, - manufacturer="NextDNS Inc.", - name=self.profile_name, - ) - - super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval) - - async def _async_update_data(self) -> CoordinatorDataT: - """Update data via internal method.""" - try: - async with asyncio.timeout(10): - return await self._async_update_data_internal() - except (ApiError, ClientConnectorError, InvalidApiKeyError) as err: - raise UpdateFailed(err) from err - - async def _async_update_data_internal(self) -> CoordinatorDataT: - """Update data via library.""" - raise NotImplementedError("Update method not implemented") - - -class NextDnsStatusUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsStatus]): # pylint: 
disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics status data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsStatus: - """Update data via library.""" - return await self.nextdns.get_analytics_status(self.profile_id) - - -class NextDnsDnssecUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsDnssec]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics Dnssec data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsDnssec: - """Update data via library.""" - return await self.nextdns.get_analytics_dnssec(self.profile_id) - - -class NextDnsEncryptionUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsEncryption]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics encryption data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsEncryption: - """Update data via library.""" - return await self.nextdns.get_analytics_encryption(self.profile_id) - - -class NextDnsIpVersionsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsIpVersions]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics IP versions data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsIpVersions: - """Update data via library.""" - return await self.nextdns.get_analytics_ip_versions(self.profile_id) - - -class NextDnsProtocolsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsProtocols]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS analytics protocols data from API.""" - - async def _async_update_data_internal(self) -> AnalyticsProtocols: - """Update data via library.""" - return await self.nextdns.get_analytics_protocols(self.profile_id) - - -class NextDnsSettingsUpdateCoordinator(NextDnsUpdateCoordinator[Settings]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage 
fetching NextDNS connection data from API.""" - - async def _async_update_data_internal(self) -> Settings: - """Update data via library.""" - return await self.nextdns.get_settings(self.profile_id) - - -class NextDnsConnectionUpdateCoordinator(NextDnsUpdateCoordinator[ConnectionStatus]): # pylint: disable=hass-enforce-coordinator-module - """Class to manage fetching NextDNS connection data from API.""" - - async def _async_update_data_internal(self) -> ConnectionStatus: - """Update data via library.""" - return await self.nextdns.connection_status(self.profile_id) - - -_LOGGER = logging.getLogger(__name__) +from .coordinator import ( + NextDnsConnectionUpdateCoordinator, + NextDnsDnssecUpdateCoordinator, + NextDnsEncryptionUpdateCoordinator, + NextDnsIpVersionsUpdateCoordinator, + NextDnsProtocolsUpdateCoordinator, + NextDnsSettingsUpdateCoordinator, + NextDnsStatusUpdateCoordinator, + NextDnsUpdateCoordinator, +) PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] COORDINATORS: list[tuple[str, type[NextDnsUpdateCoordinator], timedelta]] = [ diff --git a/homeassistant/components/nextdns/binary_sensor.py b/homeassistant/components/nextdns/binary_sensor.py index f6860586808..1bb79cf4fce 100644 --- a/homeassistant/components/nextdns/binary_sensor.py +++ b/homeassistant/components/nextdns/binary_sensor.py @@ -19,8 +19,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import CoordinatorDataT, NextDnsConnectionUpdateCoordinator from .const import ATTR_CONNECTION, DOMAIN +from .coordinator import CoordinatorDataT, NextDnsConnectionUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/button.py b/homeassistant/components/nextdns/button.py index d74152248a5..d61c953f260 100644 --- a/homeassistant/components/nextdns/button.py +++ b/homeassistant/components/nextdns/button.py @@ -9,8 +9,8 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import NextDnsStatusUpdateCoordinator from .const import ATTR_STATUS, DOMAIN +from .coordinator import NextDnsStatusUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py new file mode 100644 index 00000000000..cad1aeac070 --- /dev/null +++ b/homeassistant/components/nextdns/coordinator.py @@ -0,0 +1,124 @@ +"""NextDns coordinator.""" + +import asyncio +from datetime import timedelta +import logging +from typing import TypeVar + +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import ( + AnalyticsDnssec, + AnalyticsEncryption, + AnalyticsIpVersions, + AnalyticsProtocols, + AnalyticsStatus, + ApiError, + ConnectionStatus, + InvalidApiKeyError, + NextDns, + Settings, +) +from nextdns.model import NextDnsData + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +CoordinatorDataT = TypeVar("CoordinatorDataT", bound=NextDnsData) + + +class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): + """Class to manage fetching NextDNS data API.""" + + def __init__( + self, + 
hass: HomeAssistant, + nextdns: NextDns, + profile_id: str, + update_interval: timedelta, + ) -> None: + """Initialize.""" + self.nextdns = nextdns + self.profile_id = profile_id + self.profile_name = nextdns.get_profile_name(profile_id) + self.device_info = DeviceInfo( + configuration_url=f"https://my.nextdns.io/{profile_id}/setup", + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, str(profile_id))}, + manufacturer="NextDNS Inc.", + name=self.profile_name, + ) + + super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval) + + async def _async_update_data(self) -> CoordinatorDataT: + """Update data via internal method.""" + try: + async with asyncio.timeout(10): + return await self._async_update_data_internal() + except (ApiError, ClientConnectorError, InvalidApiKeyError) as err: + raise UpdateFailed(err) from err + + async def _async_update_data_internal(self) -> CoordinatorDataT: + """Update data via library.""" + raise NotImplementedError("Update method not implemented") + + +class NextDnsStatusUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsStatus]): + """Class to manage fetching NextDNS analytics status data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsStatus: + """Update data via library.""" + return await self.nextdns.get_analytics_status(self.profile_id) + + +class NextDnsDnssecUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsDnssec]): + """Class to manage fetching NextDNS analytics Dnssec data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsDnssec: + """Update data via library.""" + return await self.nextdns.get_analytics_dnssec(self.profile_id) + + +class NextDnsEncryptionUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsEncryption]): + """Class to manage fetching NextDNS analytics encryption data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsEncryption: + """Update data via library.""" + return await 
self.nextdns.get_analytics_encryption(self.profile_id) + + +class NextDnsIpVersionsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsIpVersions]): + """Class to manage fetching NextDNS analytics IP versions data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsIpVersions: + """Update data via library.""" + return await self.nextdns.get_analytics_ip_versions(self.profile_id) + + +class NextDnsProtocolsUpdateCoordinator(NextDnsUpdateCoordinator[AnalyticsProtocols]): + """Class to manage fetching NextDNS analytics protocols data from API.""" + + async def _async_update_data_internal(self) -> AnalyticsProtocols: + """Update data via library.""" + return await self.nextdns.get_analytics_protocols(self.profile_id) + + +class NextDnsSettingsUpdateCoordinator(NextDnsUpdateCoordinator[Settings]): + """Class to manage fetching NextDNS connection data from API.""" + + async def _async_update_data_internal(self) -> Settings: + """Update data via library.""" + return await self.nextdns.get_settings(self.profile_id) + + +class NextDnsConnectionUpdateCoordinator(NextDnsUpdateCoordinator[ConnectionStatus]): + """Class to manage fetching NextDNS connection data from API.""" + + async def _async_update_data_internal(self) -> ConnectionStatus: + """Update data via library.""" + return await self.nextdns.connection_status(self.profile_id) diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index 611021d73e4..1e7145ef6d1 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["nextdns"], "quality_scale": "platinum", - "requirements": ["nextdns==2.1.0"] + "requirements": ["nextdns==3.0.0"] } diff --git a/homeassistant/components/nextdns/sensor.py b/homeassistant/components/nextdns/sensor.py index 4357179cbdb..3ac2179ed31 100644 --- a/homeassistant/components/nextdns/sensor.py +++ 
b/homeassistant/components/nextdns/sensor.py @@ -26,7 +26,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CoordinatorDataT, NextDnsUpdateCoordinator from .const import ( ATTR_DNSSEC, ATTR_ENCRYPTION, @@ -35,6 +34,7 @@ from .const import ( ATTR_STATUS, DOMAIN, ) +from .coordinator import CoordinatorDataT, NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nextdns/switch.py b/homeassistant/components/nextdns/switch.py index 81bf8b4e8c6..dfb796efd8c 100644 --- a/homeassistant/components/nextdns/switch.py +++ b/homeassistant/components/nextdns/switch.py @@ -18,8 +18,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import CoordinatorDataT, NextDnsSettingsUpdateCoordinator from .const import ATTR_SETTINGS, DOMAIN +from .coordinator import CoordinatorDataT, NextDnsSettingsUpdateCoordinator PARALLEL_UPDATES = 1 diff --git a/homeassistant/components/nfandroidtv/notify.py b/homeassistant/components/nfandroidtv/notify.py index dd42a0ab10b..dd6b15400d9 100644 --- a/homeassistant/components/nfandroidtv/notify.py +++ b/homeassistant/components/nfandroidtv/notify.py @@ -19,6 +19,7 @@ from homeassistant.components.notify import ( ) from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -44,6 +45,7 @@ from .const import ( ATTR_POSITION, ATTR_TRANSPARENCY, DEFAULT_TIMEOUT, + DOMAIN, ) _LOGGER = logging.getLogger(__name__) @@ -133,21 +135,49 @@ class NFAndroidTVNotificationService(BaseNotificationService): "Invalid 
interrupt-value: %s", data.get(ATTR_INTERRUPT) ) if imagedata := data.get(ATTR_IMAGE): - image_file = self.load_file( - url=imagedata.get(ATTR_IMAGE_URL), - local_path=imagedata.get(ATTR_IMAGE_PATH), - username=imagedata.get(ATTR_IMAGE_USERNAME), - password=imagedata.get(ATTR_IMAGE_PASSWORD), - auth=imagedata.get(ATTR_IMAGE_AUTH), - ) + if isinstance(imagedata, str): + image_file = ( + self.load_file(url=imagedata) + if imagedata.startswith("http") + else self.load_file(local_path=imagedata) + ) + elif isinstance(imagedata, dict): + image_file = self.load_file( + url=imagedata.get(ATTR_IMAGE_URL), + local_path=imagedata.get(ATTR_IMAGE_PATH), + username=imagedata.get(ATTR_IMAGE_USERNAME), + password=imagedata.get(ATTR_IMAGE_PASSWORD), + auth=imagedata.get(ATTR_IMAGE_AUTH), + ) + else: + raise ServiceValidationError( + "Invalid image provided", + translation_domain=DOMAIN, + translation_key="invalid_notification_image", + translation_placeholders={"type": type(imagedata).__name__}, + ) if icondata := data.get(ATTR_ICON): - icon = self.load_file( - url=icondata.get(ATTR_ICON_URL), - local_path=icondata.get(ATTR_ICON_PATH), - username=icondata.get(ATTR_ICON_USERNAME), - password=icondata.get(ATTR_ICON_PASSWORD), - auth=icondata.get(ATTR_ICON_AUTH), - ) + if isinstance(icondata, str): + icondata = ( + self.load_file(url=icondata) + if icondata.startswith("http") + else self.load_file(local_path=icondata) + ) + elif isinstance(icondata, dict): + icon = self.load_file( + url=icondata.get(ATTR_ICON_URL), + local_path=icondata.get(ATTR_ICON_PATH), + username=icondata.get(ATTR_ICON_USERNAME), + password=icondata.get(ATTR_ICON_PASSWORD), + auth=icondata.get(ATTR_ICON_AUTH), + ) + else: + raise ServiceValidationError( + "Invalid Icon provided", + translation_domain=DOMAIN, + translation_key="invalid_notification_icon", + translation_placeholders={"type": type(icondata).__name__}, + ) self.notify.send( message, title=title, diff --git 
a/homeassistant/components/nfandroidtv/strings.json b/homeassistant/components/nfandroidtv/strings.json index cde02327712..e73fc68d66a 100644 --- a/homeassistant/components/nfandroidtv/strings.json +++ b/homeassistant/components/nfandroidtv/strings.json @@ -1,4 +1,12 @@ { + "exceptions": { + "invalid_notification_icon": { + "message": "Invalid icon data provided. Got {type}" + }, + "invalid_notification_image": { + "message": "Invalid image data provided. Got {type}" + } + }, "config": { "step": { "user": { diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 853370066dc..e25ae1f0877 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -81,75 +81,86 @@ DEFAULT_MAX_HISTORY = 20 MAX_HISTORY_SECONDS = 60 * 60 # 1 hour MODEL_NAMES = [ # https://ollama.com/library - "gemma", - "llama2", - "mistral", - "mixtral", - "llava", - "neural-chat", - "codellama", - "dolphin-mixtral", - "qwen", - "llama2-uncensored", - "mistral-openorca", - "deepseek-coder", - "nous-hermes2", - "phi", - "orca-mini", - "dolphin-mistral", - "wizard-vicuna-uncensored", - "vicuna", - "tinydolphin", - "llama2-chinese", - "nomic-embed-text", - "openhermes", - "zephyr", - "tinyllama", - "openchat", - "wizardcoder", - "starcoder", - "phind-codellama", - "starcoder2", - "yi", - "orca2", - "falcon", - "wizard-math", - "dolphin-phi", - "starling-lm", - "nous-hermes", - "stable-code", - "medllama2", - "bakllava", - "codeup", - "wizardlm-uncensored", - "solar", - "everythinglm", - "sqlcoder", - "dolphincoder", - "nous-hermes2-mixtral", - "stable-beluga", - "yarn-mistral", - "stablelm2", - "samantha-mistral", - "meditron", - "stablelm-zephyr", - "magicoder", - "yarn-llama2", - "llama-pro", - "deepseek-llm", - "wizard-vicuna", - "codebooga", - "mistrallite", - "all-minilm", - "nexusraven", - "open-orca-platypus2", - "goliath", - "notux", - "megadolphin", "alfred", - "xwinlm", - "wizardlm", + "all-minilm", + 
"bakllava", + "codebooga", + "codegemma", + "codellama", + "codeqwen", + "codeup", + "command-r", + "command-r-plus", + "dbrx", + "deepseek-coder", + "deepseek-llm", + "dolphin-llama3", + "dolphin-mistral", + "dolphin-mixtral", + "dolphin-phi", + "dolphincoder", "duckdb-nsql", + "everythinglm", + "falcon", + "gemma", + "goliath", + "llama-pro", + "llama2", + "llama2-chinese", + "llama2-uncensored", + "llama3", + "llava", + "magicoder", + "meditron", + "medllama2", + "megadolphin", + "mistral", + "mistral-openorca", + "mistrallite", + "mixtral", + "mxbai-embed-large", + "neural-chat", + "nexusraven", + "nomic-embed-text", "notus", + "notux", + "nous-hermes", + "nous-hermes2", + "nous-hermes2-mixtral", + "open-orca-platypus2", + "openchat", + "openhermes", + "orca-mini", + "orca2", + "phi", + "phi3", + "phind-codellama", + "qwen", + "samantha-mistral", + "snowflake-arctic-embed", + "solar", + "sqlcoder", + "stable-beluga", + "stable-code", + "stablelm-zephyr", + "stablelm2", + "starcoder", + "starcoder2", + "starling-lm", + "tinydolphin", + "tinyllama", + "vicuna", + "wizard-math", + "wizard-vicuna", + "wizard-vicuna-uncensored", + "wizardcoder", + "wizardlm", + "wizardlm-uncensored", + "wizardlm2", + "xwinlm", + "yarn-llama2", + "yarn-mistral", + "yi", + "zephyr", ] DEFAULT_MODEL = "llama2:latest" diff --git a/homeassistant/components/onewire/binary_sensor.py b/homeassistant/components/onewire/binary_sensor.py index fea78fd3760..d2e66609103 100644 --- a/homeassistant/components/onewire/binary_sensor.py +++ b/homeassistant/components/onewire/binary_sensor.py @@ -36,33 +36,33 @@ class OneWireBinarySensorEntityDescription( DEVICE_BINARY_SENSORS: dict[str, tuple[OneWireBinarySensorEntityDescription, ...]] = { "12": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + 
translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "29": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ), "3A": tuple( OneWireBinarySensorEntityDescription( - key=f"sensed.{id}", + key=f"sensed.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="sensed_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "EF": (), # "HobbyBoard": special } @@ -71,15 +71,15 @@ DEVICE_BINARY_SENSORS: dict[str, tuple[OneWireBinarySensorEntityDescription, ... HOBBYBOARD_EF: dict[str, tuple[OneWireBinarySensorEntityDescription, ...]] = { "HB_HUB": tuple( OneWireBinarySensorEntityDescription( - key=f"hub/short.{id}", + key=f"hub/short.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.DIAGNOSTIC, device_class=BinarySensorDeviceClass.PROBLEM, translation_key="hub_short_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), } diff --git a/homeassistant/components/onewire/sensor.py b/homeassistant/components/onewire/sensor.py index d32afce7fa9..46f18842d51 100644 --- a/homeassistant/components/onewire/sensor.py +++ b/homeassistant/components/onewire/sensor.py @@ -233,14 +233,14 @@ DEVICE_SENSORS: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { "42": (SIMPLE_TEMPERATURE_SENSOR_DESCRIPTION,), "1D": tuple( OneWireSensorEntityDescription( - key=f"counter.{id}", + 
key=f"counter.{device_key}", native_unit_of_measurement="count", read_mode=READ_MODE_INT, state_class=SensorStateClass.TOTAL_INCREASING, translation_key="counter_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), } @@ -273,15 +273,15 @@ HOBBYBOARD_EF: dict[str, tuple[OneWireSensorEntityDescription, ...]] = { ), "HB_MOISTURE_METER": tuple( OneWireSensorEntityDescription( - key=f"moisture/sensor.{id}", + key=f"moisture/sensor.{device_key}", device_class=SensorDeviceClass.PRESSURE, native_unit_of_measurement=UnitOfPressure.CBAR, read_mode=READ_MODE_FLOAT, state_class=SensorStateClass.MEASUREMENT, translation_key="moisture_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), } diff --git a/homeassistant/components/onewire/switch.py b/homeassistant/components/onewire/switch.py index cdf1315394e..41276218540 100644 --- a/homeassistant/components/onewire/switch.py +++ b/homeassistant/components/onewire/switch.py @@ -40,23 +40,23 @@ DEVICE_SWITCHES: dict[str, tuple[OneWireEntityDescription, ...]] = { "12": tuple( [ OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ] + [ OneWireSwitchEntityDescription( - key=f"latch.{id}", + key=f"latch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="latch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ] ), "26": ( @@ -71,34 +71,34 @@ DEVICE_SWITCHES: dict[str, 
tuple[OneWireEntityDescription, ...]] = { "29": tuple( [ OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ] + [ OneWireSwitchEntityDescription( - key=f"latch.{id}", + key=f"latch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="latch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_7 + for device_key in DEVICE_KEYS_0_7 ] ), "3A": tuple( OneWireSwitchEntityDescription( - key=f"PIO.{id}", + key=f"PIO.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, translation_key="pio_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_A_B + for device_key in DEVICE_KEYS_A_B ), "EF": (), # "HobbyBoard": special } @@ -108,37 +108,37 @@ DEVICE_SWITCHES: dict[str, tuple[OneWireEntityDescription, ...]] = { HOBBYBOARD_EF: dict[str, tuple[OneWireEntityDescription, ...]] = { "HB_HUB": tuple( OneWireSwitchEntityDescription( - key=f"hub/branch.{id}", + key=f"hub/branch.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="hub_branch_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ), "HB_MOISTURE_METER": tuple( [ OneWireSwitchEntityDescription( - key=f"moisture/is_leaf.{id}", + key=f"moisture/is_leaf.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="leaf_sensor_id", - translation_placeholders={"id": 
str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ] + [ OneWireSwitchEntityDescription( - key=f"moisture/is_moisture.{id}", + key=f"moisture/is_moisture.{device_key}", entity_registry_enabled_default=False, read_mode=READ_MODE_BOOL, entity_category=EntityCategory.CONFIG, translation_key="moisture_sensor_id", - translation_placeholders={"id": str(id)}, + translation_placeholders={"id": str(device_key)}, ) - for id in DEVICE_KEYS_0_3 + for device_key in DEVICE_KEYS_0_3 ] ), } diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index c0503e6e850..7575443c793 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -442,6 +442,7 @@ class OnkyoDevice(MediaPlayerEntity): "output_color_schema": _tuple_get(values, 6), "output_color_depth": _tuple_get(values, 7), "picture_mode": _tuple_get(values, 8), + "dynamic_range": _tuple_get(values, 9), } self._attr_extra_state_attributes[ATTR_VIDEO_INFORMATION] = info else: diff --git a/homeassistant/components/osoenergy/__init__.py b/homeassistant/components/osoenergy/__init__.py index 48ea01e8bb8..20ff22cea23 100644 --- a/homeassistant/components/osoenergy/__init__.py +++ b/homeassistant/components/osoenergy/__init__.py @@ -16,18 +16,25 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity from .const import DOMAIN -_T = TypeVar( - "_T", OSOEnergyBinarySensorData, OSOEnergySensorData, OSOEnergyWaterHeaterData +_OSOEnergyT = TypeVar( + "_OSOEnergyT", + OSOEnergyBinarySensorData, + OSOEnergySensorData, + OSOEnergyWaterHeaterData, ) +MANUFACTURER = "OSO Energy" 
PLATFORMS = [ + Platform.SENSOR, Platform.WATER_HEATER, ] PLATFORM_LOOKUP = { + Platform.SENSOR: "sensor", Platform.WATER_HEATER: "water_heater", } @@ -70,13 +77,18 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -class OSOEnergyEntity(Entity, Generic[_T]): +class OSOEnergyEntity(Entity, Generic[_OSOEnergyT]): """Initiate OSO Energy Base Class.""" _attr_has_entity_name = True - def __init__(self, osoenergy: OSOEnergy, osoenergy_device: _T) -> None: + def __init__(self, osoenergy: OSOEnergy, entity_data: _OSOEnergyT) -> None: """Initialize the instance.""" self.osoenergy = osoenergy - self.device = osoenergy_device - self._attr_unique_id = osoenergy_device.device_id + self.entity_data = entity_data + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entity_data.device_id)}, + manufacturer=MANUFACTURER, + model=entity_data.device_type, + name=entity_data.device_name, + ) diff --git a/homeassistant/components/osoenergy/sensor.py b/homeassistant/components/osoenergy/sensor.py new file mode 100644 index 00000000000..0be6ad83281 --- /dev/null +++ b/homeassistant/components/osoenergy/sensor.py @@ -0,0 +1,151 @@ +"""Support for OSO Energy sensors.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from apyosoenergyapi import OSOEnergy +from apyosoenergyapi.helper.const import OSOEnergySensorData + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfVolume +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import OSOEnergyEntity +from .const import DOMAIN + + +@dataclass(frozen=True, kw_only=True) +class OSOEnergySensorEntityDescription(SensorEntityDescription): + """Class describing OSO Energy heater sensor entities.""" + + value_fn: Callable[[OSOEnergy], StateType] + + +SENSOR_TYPES: dict[str, OSOEnergySensorEntityDescription] = { + "heater_mode": OSOEnergySensorEntityDescription( + key="heater_mode", + translation_key="heater_mode", + device_class=SensorDeviceClass.ENUM, + options=[ + "auto", + "manual", + "off", + "legionella", + "powersave", + "extraenergy", + "voltage", + "ffr", + ], + value_fn=lambda entity_data: entity_data.state.lower(), + ), + "optimization_mode": OSOEnergySensorEntityDescription( + key="optimization_mode", + translation_key="optimization_mode", + device_class=SensorDeviceClass.ENUM, + options=["off", "oso", "gridcompany", "smartcompany", "advanced"], + value_fn=lambda entity_data: entity_data.state.lower(), + ), + "power_load": OSOEnergySensorEntityDescription( + key="power_load", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda entity_data: entity_data.state, + ), + "tapping_capacity": OSOEnergySensorEntityDescription( + key="tapping_capacity", + translation_key="tapping_capacity", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda entity_data: entity_data.state, + ), + "capacity_mixed_water_40": OSOEnergySensorEntityDescription( + key="capacity_mixed_water_40", + translation_key="capacity_mixed_water_40", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "v40_min": OSOEnergySensorEntityDescription( + key="v40_min", + translation_key="v40_min", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: 
entity_data.state, + ), + "v40_level_min": OSOEnergySensorEntityDescription( + key="v40_level_min", + translation_key="v40_level_min", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "v40_level_max": OSOEnergySensorEntityDescription( + key="v40_level_max", + translation_key="v40_level_max", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), + "volume": OSOEnergySensorEntityDescription( + key="volume", + device_class=SensorDeviceClass.VOLUME, + native_unit_of_measurement=UnitOfVolume.LITERS, + value_fn=lambda entity_data: entity_data.state, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up OSO Energy sensor.""" + osoenergy = hass.data[DOMAIN][entry.entry_id] + devices = osoenergy.session.device_list.get("sensor") + entities = [] + if devices: + for dev in devices: + sensor_type = dev.osoEnergyType.lower() + if sensor_type in SENSOR_TYPES: + entities.append( + OSOEnergySensor(osoenergy, SENSOR_TYPES[sensor_type], dev) + ) + + async_add_entities(entities, True) + + +class OSOEnergySensor(OSOEnergyEntity[OSOEnergySensorData], SensorEntity): + """OSO Energy Sensor Entity.""" + + entity_description: OSOEnergySensorEntityDescription + + def __init__( + self, + instance: OSOEnergy, + description: OSOEnergySensorEntityDescription, + entity_data: OSOEnergySensorData, + ) -> None: + """Initialize the OSO Energy sensor.""" + super().__init__(instance, entity_data) + + device_id = entity_data.device_id + self._attr_unique_id = f"{device_id}_{description.key}" + self.entity_description = description + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.entity_data) + + async def async_update(self) -> None: 
+ """Update all data for OSO Energy.""" + await self.osoenergy.session.update_data() + self.entity_data = await self.osoenergy.sensor.get_sensor(self.entity_data) diff --git a/homeassistant/components/osoenergy/strings.json b/homeassistant/components/osoenergy/strings.json index a45482bf030..5313f1d6565 100644 --- a/homeassistant/components/osoenergy/strings.json +++ b/homeassistant/components/osoenergy/strings.json @@ -17,13 +17,56 @@ } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "tapping_capacity": { + "name": "Tapping capacity" + }, + "capacity_mixed_water_40": { + "name": "Capacity mixed water 40°C" + }, + "v40_min": { + "name": "Mixed water at 40°C" + }, + "v40_level_min": { + "name": "Minimum level of mixed water at 40°C" + }, + "v40_level_max": { + "name": "Maximum level of mixed water at 40°C" + }, + "heater_mode": { + "name": "Heater mode", + "state": { + "auto": "Auto", + "extraenergy": "Extra energy", + "ffr": "Fast frequency reserve", + "legionella": "Legionella", + "manual": "Manual", + "off": "Off", + "powersave": "Power save", + "voltage": "Voltage" + } + }, + "optimization_mode": { + "name": "Optimization mode", + "state": { + "advanced": "Advanced", + "gridcompany": "Grid company", + "off": "Off", + "oso": "OSO", + "smartcompany": "Smart company" + } + }, + "profile": { + "name": "Profile local" + } + } } } diff --git a/homeassistant/components/osoenergy/water_heater.py b/homeassistant/components/osoenergy/water_heater.py index eaf54a9f9a4..b7fb2ba16e6 100644 --- 
a/homeassistant/components/osoenergy/water_heater.py +++ b/homeassistant/components/osoenergy/water_heater.py @@ -2,6 +2,7 @@ from typing import Any +from apyosoenergyapi import OSOEnergy from apyosoenergyapi.helper.const import OSOEnergyWaterHeaterData from homeassistant.components.water_heater import ( @@ -15,7 +16,6 @@ from homeassistant.components.water_heater import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import OSOEnergyEntity @@ -34,9 +34,6 @@ CURRENT_OPERATION_MAP: dict[str, Any] = { "extraenergy": STATE_HIGH_DEMAND, }, } -HEATER_MIN_TEMP = 10 -HEATER_MAX_TEMP = 80 -MANUFACTURER = "OSO Energy" async def async_setup_entry( @@ -59,30 +56,29 @@ class OSOEnergyWaterHeater( _attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE _attr_temperature_unit = UnitOfTemperature.CELSIUS - @property - def device_info(self) -> DeviceInfo: - """Return device information.""" - return DeviceInfo( - identifiers={(DOMAIN, self.device.device_id)}, - manufacturer=MANUFACTURER, - model=self.device.device_type, - name=self.device.device_name, - ) + def __init__( + self, + instance: OSOEnergy, + entity_data: OSOEnergyWaterHeaterData, + ) -> None: + """Initialize the OSO Energy water heater.""" + super().__init__(instance, entity_data) + self._attr_unique_id = entity_data.device_id @property def available(self) -> bool: """Return if the device is available.""" - return self.device.available + return self.entity_data.available @property def current_operation(self) -> str: """Return current operation.""" - status = self.device.current_operation + status = self.entity_data.current_operation if status == "off": return STATE_OFF - optimization_mode = self.device.optimization_mode.lower() - heater_mode = 
self.device.heater_mode.lower() + optimization_mode = self.entity_data.optimization_mode.lower() + heater_mode = self.entity_data.heater_mode.lower() if optimization_mode in CURRENT_OPERATION_MAP: return CURRENT_OPERATION_MAP[optimization_mode].get( heater_mode, STATE_ELECTRIC @@ -93,49 +89,51 @@ class OSOEnergyWaterHeater( @property def current_temperature(self) -> float: """Return the current temperature of the heater.""" - return self.device.current_temperature + return self.entity_data.current_temperature @property def target_temperature(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature + return self.entity_data.target_temperature @property def target_temperature_high(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature_high + return self.entity_data.target_temperature_high @property def target_temperature_low(self) -> float: """Return the temperature we try to reach.""" - return self.device.target_temperature_low + return self.entity_data.target_temperature_low @property def min_temp(self) -> float: """Return the minimum temperature.""" - return self.device.min_temperature + return self.entity_data.min_temperature @property def max_temp(self) -> float: """Return the maximum temperature.""" - return self.device.max_temperature + return self.entity_data.max_temperature async def async_turn_on(self, **kwargs) -> None: """Turn on hotwater.""" - await self.osoenergy.hotwater.turn_on(self.device, True) + await self.osoenergy.hotwater.turn_on(self.entity_data, True) async def async_turn_off(self, **kwargs) -> None: """Turn off hotwater.""" - await self.osoenergy.hotwater.turn_off(self.device, True) + await self.osoenergy.hotwater.turn_off(self.entity_data, True) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" target_temperature = int(kwargs.get("temperature", self.target_temperature)) profile = [target_temperature] * 24 
- await self.osoenergy.hotwater.set_profile(self.device, profile) + await self.osoenergy.hotwater.set_profile(self.entity_data, profile) async def async_update(self) -> None: """Update all Node data from Hive.""" await self.osoenergy.session.update_data() - self.device = await self.osoenergy.hotwater.get_water_heater(self.device) + self.entity_data = await self.osoenergy.hotwater.get_water_heater( + self.entity_data + ) diff --git a/homeassistant/components/ovo_energy/__init__.py b/homeassistant/components/ovo_energy/__init__.py index e0c2b77664a..d207f3161f4 100644 --- a/homeassistant/components/ovo_energy/__init__.py +++ b/homeassistant/components/ovo_energy/__init__.py @@ -7,13 +7,14 @@ from datetime import timedelta import logging import aiohttp +from ovoenergy import OVOEnergy from ovoenergy.models import OVODailyUsage -from ovoenergy.ovoenergy import OVOEnergy from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, @@ -32,29 +33,35 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up OVO Energy from a config entry.""" - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(hass), + ) + + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: + client.custom_account_id = custom_account try: - authenticated = await client.authenticate( + if not await client.authenticate( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], - entry.data[CONF_ACCOUNT], - ) + ): + raise ConfigEntryAuthFailed + + await client.bootstrap_accounts() except
aiohttp.ClientError as exception: _LOGGER.warning(exception) raise ConfigEntryNotReady from exception - if not authenticated: - raise ConfigEntryAuthFailed - async def async_update_data() -> OVODailyUsage: """Fetch data from OVO Energy.""" + if (custom_account := entry.data.get(CONF_ACCOUNT)) is not None: + client.custom_account_id = custom_account + async with asyncio.timeout(10): try: authenticated = await client.authenticate( entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], - entry.data[CONF_ACCOUNT], ) except aiohttp.ClientError as exception: raise UpdateFailed(exception) from exception diff --git a/homeassistant/components/ovo_energy/config_flow.py b/homeassistant/components/ovo_energy/config_flow.py index 41c64913764..87d53e5fbf9 100644 --- a/homeassistant/components/ovo_energy/config_flow.py +++ b/homeassistant/components/ovo_energy/config_flow.py @@ -6,11 +6,12 @@ from collections.abc import Mapping from typing import Any import aiohttp -from ovoenergy.ovoenergy import OVOEnergy +from ovoenergy import OVOEnergy import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_ACCOUNT, DOMAIN @@ -41,13 +42,19 @@ class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} if user_input is not None: - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(self.hass), + ) + + if (custom_account := user_input.get(CONF_ACCOUNT)) is not None: + client.custom_account_id = custom_account + try: authenticated = await client.authenticate( user_input[CONF_USERNAME], user_input[CONF_PASSWORD], - user_input.get(CONF_ACCOUNT, None), ) + await client.bootstrap_accounts() except aiohttp.ClientError: errors["base"] = "cannot_connect" else: @@ -86,10 +93,17 @@ class OVOEnergyFlowHandler(ConfigFlow,
domain=DOMAIN): self.context["title_placeholders"] = {CONF_USERNAME: self.username} if user_input is not None and user_input.get(CONF_PASSWORD) is not None: - client = OVOEnergy() + client = OVOEnergy( + client_session=async_get_clientsession(self.hass), + ) + + if self.account is not None: + client.custom_account_id = self.account + try: authenticated = await client.authenticate( - self.username, user_input[CONF_PASSWORD], self.account + self.username, + user_input[CONF_PASSWORD], ) except aiohttp.ClientError: errors["base"] = "connection_error" diff --git a/homeassistant/components/ovo_energy/manifest.json b/homeassistant/components/ovo_energy/manifest.json index 9435958f1fe..af4a313206e 100644 --- a/homeassistant/components/ovo_energy/manifest.json +++ b/homeassistant/components/ovo_energy/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["ovoenergy"], - "requirements": ["ovoenergy==1.3.1"] + "requirements": ["ovoenergy==2.0.0"] } diff --git a/homeassistant/components/ovo_energy/sensor.py b/homeassistant/components/ovo_energy/sensor.py index d5384837e9c..5b16e8cdef5 100644 --- a/homeassistant/components/ovo_energy/sensor.py +++ b/homeassistant/components/ovo_energy/sensor.py @@ -7,8 +7,8 @@ import dataclasses from datetime import datetime, timedelta from typing import Final +from ovoenergy import OVOEnergy from ovoenergy.models import OVODailyUsage -from ovoenergy.ovoenergy import OVOEnergy from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/pegel_online/manifest.json b/homeassistant/components/pegel_online/manifest.json index d193fd7487a..d51278d0c1b 100644 --- a/homeassistant/components/pegel_online/manifest.json +++ b/homeassistant/components/pegel_online/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aiopegelonline"], - "requirements": ["aiopegelonline==0.0.9"] + "requirements": 
["aiopegelonline==0.0.10"] } diff --git a/homeassistant/components/plex/manifest.json b/homeassistant/components/plex/manifest.json index 85362371715..ff0ab39b150 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.11", + "PlexAPI==4.15.12", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/homeassistant/components/plugwise/__init__.py b/homeassistant/components/plugwise/__init__.py index 28389ffa357..3140e518688 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -49,8 +49,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def async_migrate_entity_entry(entry: er.RegistryEntry) -> dict[str, Any] | None: """Migrate Plugwise entity entries. - - Migrates unique ID from old relay switches to the new unique ID + - Migrates old unique ID's from old binary_sensors and switches to the new unique ID's """ + if entry.domain == Platform.BINARY_SENSOR and entry.unique_id.endswith( + "-slave_boiler_state" + ): + return { + "new_unique_id": entry.unique_id.replace( + "-slave_boiler_state", "-secondary_boiler_state" + ) + } if entry.domain == Platform.SWITCH and entry.unique_id.endswith("-plug"): return {"new_unique_id": entry.unique_id.replace("-plug", "-relay")} diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index d32ae94160f..01ebc736dbe 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -64,8 +64,8 @@ BINARY_SENSORS: tuple[PlugwiseBinarySensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( - key="slave_boiler_state", - translation_key="slave_boiler_state", + key="secondary_boiler_state", + translation_key="secondary_boiler_state", entity_category=EntityCategory.DIAGNOSTIC, ), PlugwiseBinarySensorEntityDescription( diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 888f813760a..ada7d2d2533 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==0.37.1"], + "requirements": ["plugwise==0.37.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index 7d26f5a624c..ef2d6458441 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -48,7 +48,7 @@ "cooling_state": { "name": "[%key:component::climate::entity_component::_::state_attributes::hvac_action::state::cooling%]" }, - "slave_boiler_state": { + "secondary_boiler_state": { "name": "Secondary boiler state" }, "plugwise_notification": { diff --git a/homeassistant/components/qbittorrent/__init__.py b/homeassistant/components/qbittorrent/__init__.py index 7b1a38b7e31..84f080c4d49 100644 --- a/homeassistant/components/qbittorrent/__init__.py +++ b/homeassistant/components/qbittorrent/__init__.py @@ -1,29 +1,111 @@ """The qbittorrent component.""" import logging +from typing import Any from qbittorrent.client import LoginRequired from requests.exceptions import RequestException from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_DEVICE_ID, CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, Platform, ) -from homeassistant.core import HomeAssistant -from 
homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse +from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN +from .const import ( + DOMAIN, + SERVICE_GET_ALL_TORRENTS, + SERVICE_GET_TORRENTS, + STATE_ATTR_ALL_TORRENTS, + STATE_ATTR_TORRENTS, + TORRENT_FILTER, +) from .coordinator import QBittorrentDataCoordinator -from .helpers import setup_client +from .helpers import format_torrents, setup_client _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + PLATFORMS = [Platform.SENSOR] +CONF_ENTRY = "entry" + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up qBittorrent services.""" + + async def handle_get_torrents(service_call: ServiceCall) -> dict[str, Any] | None: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(service_call.data[ATTR_DEVICE_ID]) + + if device_entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device", + translation_placeholders={ + "device_id": service_call.data[ATTR_DEVICE_ID] + }, + ) + + entry_id = None + + for key, value in device_entry.identifiers: + if key == DOMAIN: + entry_id = value + break + else: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_entry_id", + translation_placeholders={"device_id": entry_id or ""}, + ) + + coordinator: QBittorrentDataCoordinator = hass.data[DOMAIN][entry_id] + items = await coordinator.get_torrents(service_call.data[TORRENT_FILTER]) + info = format_torrents(items) + return { + STATE_ATTR_TORRENTS: info, + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_TORRENTS, + handle_get_torrents, + supports_response=SupportsResponse.ONLY, + ) + + async def 
handle_get_all_torrents( + service_call: ServiceCall, + ) -> dict[str, Any] | None: + torrents = {} + + for key, value in hass.data[DOMAIN].items(): + coordinator: QBittorrentDataCoordinator = value + items = await coordinator.get_torrents(service_call.data[TORRENT_FILTER]) + torrents[key] = format_torrents(items) + + return { + STATE_ATTR_ALL_TORRENTS: torrents, + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_ALL_TORRENTS, + handle_get_all_torrents, + supports_response=SupportsResponse.ONLY, + ) + + return True + async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up qBittorrent from a config entry.""" diff --git a/homeassistant/components/qbittorrent/const.py b/homeassistant/components/qbittorrent/const.py index d8fe2c012a3..73e29d06f40 100644 --- a/homeassistant/components/qbittorrent/const.py +++ b/homeassistant/components/qbittorrent/const.py @@ -7,6 +7,13 @@ DOMAIN: Final = "qbittorrent" DEFAULT_NAME = "qBittorrent" DEFAULT_URL = "http://127.0.0.1:8080" +STATE_ATTR_TORRENTS = "torrents" +STATE_ATTR_ALL_TORRENTS = "all_torrents" + STATE_UP_DOWN = "up_down" STATE_SEEDING = "seeding" STATE_DOWNLOADING = "downloading" + +SERVICE_GET_TORRENTS = "get_torrents" +SERVICE_GET_ALL_TORRENTS = "get_all_torrents" +TORRENT_FILTER = "torrent_filter" diff --git a/homeassistant/components/qbittorrent/coordinator.py b/homeassistant/components/qbittorrent/coordinator.py index 32ce4cf9711..850bcf15ca2 100644 --- a/homeassistant/components/qbittorrent/coordinator.py +++ b/homeassistant/components/qbittorrent/coordinator.py @@ -10,7 +10,7 @@ from qbittorrent import Client from qbittorrent.client import LoginRequired from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -19,11 +19,18 @@ _LOGGER = logging.getLogger(__name__) 
class QBittorrentDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): - """Coordinator for updating QBittorrent data.""" + """Coordinator for updating qBittorrent data.""" def __init__(self, hass: HomeAssistant, client: Client) -> None: """Initialize coordinator.""" self.client = client + # self.main_data: dict[str, int] = {} + self.total_torrents: dict[str, int] = {} + self.active_torrents: dict[str, int] = {} + self.inactive_torrents: dict[str, int] = {} + self.paused_torrents: dict[str, int] = {} + self.seeding_torrents: dict[str, int] = {} + self.started_torrents: dict[str, int] = {} super().__init__( hass, @@ -33,7 +40,21 @@ class QBittorrentDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) async def _async_update_data(self) -> dict[str, Any]: + """Async method to update QBittorrent data.""" try: return await self.hass.async_add_executor_job(self.client.sync_main_data) except LoginRequired as exc: - raise ConfigEntryError("Invalid authentication") from exc + raise HomeAssistantError(str(exc)) from exc + + async def get_torrents(self, torrent_filter: str) -> list[dict[str, Any]]: + """Async method to get QBittorrent torrents.""" + try: + torrents = await self.hass.async_add_executor_job( + lambda: self.client.torrents(filter=torrent_filter) + ) + except LoginRequired as exc: + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="login_error" + ) from exc + + return torrents diff --git a/homeassistant/components/qbittorrent/helpers.py b/homeassistant/components/qbittorrent/helpers.py index b9c29675473..bbe53765f8b 100644 --- a/homeassistant/components/qbittorrent/helpers.py +++ b/homeassistant/components/qbittorrent/helpers.py @@ -1,5 +1,8 @@ """Helper functions for qBittorrent.""" +from datetime import UTC, datetime +from typing import Any + from qbittorrent.client import Client @@ -10,3 +13,48 @@ def setup_client(url: str, username: str, password: str, verify_ssl: bool) -> Cl # Get an arbitrary attribute to test if connection 
succeeds client.get_alternative_speed_status() return client + + +def seconds_to_hhmmss(seconds) -> str: + """Convert seconds to HH:MM:SS format.""" + if seconds == 8640000: + return "None" + + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" + + +def format_unix_timestamp(timestamp) -> str: + """Format a UNIX timestamp to a human-readable date.""" + dt_object = datetime.fromtimestamp(timestamp, tz=UTC) + return dt_object.isoformat() + + +def format_progress(torrent) -> str: + """Format the progress of a torrent.""" + progress = torrent["progress"] + progress = float(progress) * 100 + return f"{progress:.2f}" + + +def format_torrents(torrents: list[dict[str, Any]]) -> dict[str, dict[str, Any]]: + """Format a list of torrents.""" + value = {} + for torrent in torrents: + value[torrent["name"]] = format_torrent(torrent) + + return value + + +def format_torrent(torrent) -> dict[str, Any]: + """Format a single torrent.""" + value = {} + value["id"] = torrent["hash"] + value["added_date"] = format_unix_timestamp(torrent["added_on"]) + value["percent_done"] = format_progress(torrent) + value["status"] = torrent["state"] + value["eta"] = seconds_to_hhmmss(torrent["eta"]) + value["ratio"] = "{:.2f}".format(float(torrent["ratio"])) + + return value diff --git a/homeassistant/components/qbittorrent/icons.json b/homeassistant/components/qbittorrent/icons.json index bb458c751e1..68fc1020dae 100644 --- a/homeassistant/components/qbittorrent/icons.json +++ b/homeassistant/components/qbittorrent/icons.json @@ -8,5 +8,9 @@ "default": "mdi:cloud-upload" } } + }, + "services": { + "get_torrents": "mdi:file-arrow-up-down-outline", + "get_all_torrents": "mdi:file-arrow-up-down-outline" } } diff --git a/homeassistant/components/qbittorrent/services.yaml b/homeassistant/components/qbittorrent/services.yaml new file mode 100644 index 00000000000..f7fc6b95f64 --- /dev/null +++ 
b/homeassistant/components/qbittorrent/services.yaml @@ -0,0 +1,35 @@ +get_torrents: + fields: + device_id: + required: true + selector: + device: + integration: qbittorrent + torrent_filter: + required: true + example: "all" + default: "all" + selector: + select: + options: + - "active" + - "inactive" + - "paused" + - "all" + - "seeding" + - "started" +get_all_torrents: + fields: + torrent_filter: + required: true + example: "all" + default: "all" + selector: + select: + options: + - "active" + - "inactive" + - "paused" + - "all" + - "seeding" + - "started" diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 8b20a3354dd..5376e929429 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -48,5 +48,42 @@ "name": "All torrents" } } + }, + "services": { + "get_torrents": { + "name": "Get torrents", + "description": "Gets a list of current torrents", + "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "Which service to grab the list from" + }, + "torrent_filter": { + "name": "Torrent filter", + "description": "What kind of torrents you want to return, such as All or Active." + } + } + }, + "get_all_torrents": { + "name": "Get all torrents", + "description": "Gets a list of current torrents from all instances of qBittorrent", + "fields": { + "torrent_filter": { + "name": "Torrent filter", + "description": "What kind of torrents you want to return, such as All or Active." + } + } + } + }, + "exceptions": { + "invalid_device": { + "message": "No device with id {device_id} was found" + }, + "invalid_entry_id": { + "message": "No entry with id {device_id} was found" + }, + "login_error": { + "message": "A login error occurred. Please check your username and password." 
+ } } } diff --git a/homeassistant/components/recorder/services.py b/homeassistant/components/recorder/services.py index b4d719a9481..2be02fe8091 100644 --- a/homeassistant/components/recorder/services.py +++ b/homeassistant/components/recorder/services.py @@ -7,6 +7,7 @@ from typing import cast import voluptuous as vol +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entityfilter import generate_filter @@ -36,15 +37,28 @@ SERVICE_PURGE_SCHEMA = vol.Schema( ATTR_DOMAINS = "domains" ATTR_ENTITY_GLOBS = "entity_globs" -SERVICE_PURGE_ENTITIES_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(ATTR_ENTITY_GLOBS, default=[]): vol.All( - cv.ensure_list, [cv.string] +SERVICE_PURGE_ENTITIES_SCHEMA = vol.All( + vol.Schema( + { + vol.Optional(ATTR_ENTITY_ID, default=[]): cv.entity_ids, + vol.Optional(ATTR_DOMAINS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_ENTITY_GLOBS, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + vol.Optional(ATTR_KEEP_DAYS, default=0): cv.positive_int, + } + ), + vol.Any( + vol.Schema({vol.Required(ATTR_ENTITY_ID): vol.IsTrue()}, extra=vol.ALLOW_EXTRA), + vol.Schema({vol.Required(ATTR_DOMAINS): vol.IsTrue()}, extra=vol.ALLOW_EXTRA), + vol.Schema( + {vol.Required(ATTR_ENTITY_GLOBS): vol.IsTrue()}, extra=vol.ALLOW_EXTRA ), - vol.Optional(ATTR_KEEP_DAYS, default=0): cv.positive_int, - } -).extend(cv.ENTITY_SERVICE_FIELDS) + msg="At least one of entity_id, domains, or entity_globs must have a value", + ), +) SERVICE_ENABLE_SCHEMA = vol.Schema({}) SERVICE_DISABLE_SCHEMA = vol.Schema({}) diff --git a/homeassistant/components/recorder/services.yaml b/homeassistant/components/recorder/services.yaml index b74dcc2a494..7d7b926548c 100644 --- a/homeassistant/components/recorder/services.yaml +++ 
b/homeassistant/components/recorder/services.yaml @@ -20,20 +20,21 @@ purge: boolean: purge_entities: - target: - entity: {} fields: + entity_id: + required: false + selector: + entity: + multiple: true domains: example: "sun" required: false - default: [] selector: object: entity_globs: example: "domain*.object_id*" required: false - default: [] selector: object: diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index 74b248354d7..bf5d95ae1fc 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -41,6 +41,10 @@ "name": "Purge entities", "description": "Starts a purge task to remove the data related to specific entities from your database.", "fields": { + "entity_id": { + "name": "Entities to remove", + "description": "List of entities for which the data is to be removed from the recorder database." + }, "domains": { "name": "Domains to remove", "description": "List of domains for which the data needs to be removed from the recorder database." diff --git a/homeassistant/components/renault/select.py b/homeassistant/components/renault/select.py index f6c8f73d24b..eb79e197937 100644 --- a/homeassistant/components/renault/select.py +++ b/homeassistant/components/renault/select.py @@ -71,6 +71,6 @@ SENSOR_TYPES: tuple[RenaultSelectEntityDescription, ...] 
= ( coordinator="charge_mode", data_key="chargeMode", translation_key="charge_mode", - options=["always", "always_charging", "schedule_mode"], + options=["always", "always_charging", "schedule_mode", "scheduled"], ), ) diff --git a/homeassistant/components/rfxtrx/manifest.json b/homeassistant/components/rfxtrx/manifest.json index ec902855f27..bb3701e2e31 100644 --- a/homeassistant/components/rfxtrx/manifest.json +++ b/homeassistant/components/rfxtrx/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rfxtrx", "iot_class": "local_push", "loggers": ["RFXtrx"], - "requirements": ["pyRFXtrx==0.31.0"] + "requirements": ["pyRFXtrx==0.31.1"] } diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index 4c590b95e52..22e73a10d6d 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["pyrisco"], "quality_scale": "platinum", - "requirements": ["pyrisco==0.6.0"] + "requirements": ["pyrisco==0.6.1"] } diff --git a/homeassistant/components/risco/sensor.py b/homeassistant/components/risco/sensor.py index f4d6ddaf451..8f97c76c879 100644 --- a/homeassistant/components/risco/sensor.py +++ b/homeassistant/components/risco/sensor.py @@ -56,8 +56,8 @@ async def async_setup_entry( config_entry.entry_id ][EVENTS_COORDINATOR] sensors = [ - RiscoSensor(coordinator, id, [], name, config_entry.entry_id) - for id, name in CATEGORIES.items() + RiscoSensor(coordinator, category_id, [], name, config_entry.entry_id) + for category_id, name in CATEGORIES.items() ] sensors.append( RiscoSensor( diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index b72fec5a8e1..12a884dba48 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -107,7 +107,9 @@ async def setup_device( home_data_rooms: 
list[HomeDataRoom], ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" - mqtt_client = RoborockMqttClientV1(user_data, DeviceData(device, product_info.name)) + mqtt_client = RoborockMqttClientV1( + user_data, DeviceData(device, product_info.model) + ) try: networking = await mqtt_client.get_networking() if networking is None: diff --git a/homeassistant/components/roborock/device.py b/homeassistant/components/roborock/device.py index 69384d6e23a..6450d849859 100644 --- a/homeassistant/components/roborock/device.py +++ b/homeassistant/components/roborock/device.py @@ -137,4 +137,4 @@ class RoborockCoordinatedEntity( else: self.coordinator.roborock_device_info.props.consumable = value self.coordinator.data = self.coordinator.roborock_device_info.props - self.async_write_ha_state() + self.schedule_update_ha_state() diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index d8108abf78c..16cf518aa02 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -178,4 +178,8 @@ class RoborockVacuum(RoborockCoordinatedEntity, StateVacuumEntity): async def get_maps(self) -> ServiceResponse: """Get map information such as map id and room ids.""" - return {"maps": [asdict(map) for map in self.coordinator.maps.values()]} + return { + "maps": [ + asdict(vacuum_map) for vacuum_map in self.coordinator.maps.values() + ] + } diff --git a/homeassistant/components/romy/binary_sensor.py b/homeassistant/components/romy/binary_sensor.py new file mode 100644 index 00000000000..d8f6216007f --- /dev/null +++ b/homeassistant/components/romy/binary_sensor.py @@ -0,0 +1,73 @@ +"""Checking binary status values from your ROMY.""" + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import RomyVacuumCoordinator +from .entity import RomyEntity + +BINARY_SENSORS: list[BinarySensorEntityDescription] = [ + BinarySensorEntityDescription( + key="dustbin", + translation_key="dustbin_present", + ), + BinarySensorEntityDescription( + key="dock", + translation_key="docked", + device_class=BinarySensorDeviceClass.PLUG, + ), + BinarySensorEntityDescription( + key="water_tank", + translation_key="water_tank_present", + device_class=BinarySensorDeviceClass.MOISTURE, + ), + BinarySensorEntityDescription( + key="water_tank_empty", + translation_key="water_tank_empty", + device_class=BinarySensorDeviceClass.PROBLEM, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up ROMY vacuum cleaner.""" + + coordinator: RomyVacuumCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + RomyBinarySensor(coordinator, entity_description) + for entity_description in BINARY_SENSORS + if entity_description.key in coordinator.romy.binary_sensors + ) + + +class RomyBinarySensor(RomyEntity, BinarySensorEntity): + """RomyBinarySensor Class.""" + + entity_description: BinarySensorEntityDescription + + def __init__( + self, + coordinator: RomyVacuumCoordinator, + entity_description: BinarySensorEntityDescription, + ) -> None: + """Initialize the RomyBinarySensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{entity_description.key}_{self.romy.unique_id}" + self.entity_description = entity_description + + @property + def is_on(self) -> bool: + """Return the value of the sensor.""" + return bool(self.romy.binary_sensors[self.entity_description.key]) diff --git a/homeassistant/components/romy/const.py b/homeassistant/components/romy/const.py index 5d42380902b..a41482ffe59 100644 --- a/homeassistant/components/romy/const.py +++ 
b/homeassistant/components/romy/const.py @@ -6,6 +6,6 @@ import logging from homeassistant.const import Platform DOMAIN = "romy" -PLATFORMS = [Platform.VACUUM] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.VACUUM] UPDATE_INTERVAL = timedelta(seconds=5) LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/romy/icons.json b/homeassistant/components/romy/icons.json new file mode 100644 index 00000000000..3425d5cfade --- /dev/null +++ b/homeassistant/components/romy/icons.json @@ -0,0 +1,37 @@ +{ + "entity": { + "binary_sensor": { + "water_tank_empty": { + "default": "mdi:cup-outline", + "state": { + "off": "mdi:cup-water", + "on": "mdi:cup-outline" + } + }, + "dustbin_present": { + "default": "mdi:basket-check", + "state": { + "off": "mdi:basket-remove", + "on": "mdi:basket-check" + } + } + }, + "sensor": { + "dustbin_sensor": { + "default": "mdi:basket-fill" + }, + "total_cleaning_time": { + "default": "mdi:clock" + }, + "total_number_of_cleaning_runs": { + "default": "mdi:counter" + }, + "total_area_cleaned": { + "default": "mdi:texture-box" + }, + "total_distance_driven": { + "default": "mdi:run" + } + } + } +} diff --git a/homeassistant/components/romy/sensor.py b/homeassistant/components/romy/sensor.py new file mode 100644 index 00000000000..bdd486c4f8f --- /dev/null +++ b/homeassistant/components/romy/sensor.py @@ -0,0 +1,112 @@ +"""Sensor checking adc and status values from your ROMY.""" + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + AREA_SQUARE_METERS, + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfLength, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import 
RomyVacuumCoordinator +from .entity import RomyEntity + +SENSORS: list[SensorEntityDescription] = [ + SensorEntityDescription( + key="battery_level", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="rssi", + entity_registry_enabled_default=False, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="dustbin_sensor", + translation_key="dustbin_sensor", + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_cleaning_time", + translation_key="total_cleaning_time", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_number_of_cleaning_runs", + translation_key="total_number_of_cleaning_runs", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="runs", + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_area_cleaned", + translation_key="total_area_cleaned", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=AREA_SQUARE_METERS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SensorEntityDescription( + key="total_distance_driven", + translation_key="total_distance_driven", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfLength.METERS, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up ROMY vacuum cleaner.""" + + coordinator: RomyVacuumCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + 
RomySensor(coordinator, entity_description) + for entity_description in SENSORS + if entity_description.key in coordinator.romy.sensors + ) + + +class RomySensor(RomyEntity, SensorEntity): + """RomySensor Class.""" + + entity_description: SensorEntityDescription + + def __init__( + self, + coordinator: RomyVacuumCoordinator, + entity_description: SensorEntityDescription, + ) -> None: + """Initialize ROMYs StatusSensor.""" + super().__init__(coordinator) + self._attr_unique_id = f"{entity_description.key}_{self.romy.unique_id}" + self.entity_description = entity_description + + @property + def native_value(self) -> int: + """Return the value of the sensor.""" + value: int = self.romy.sensors[self.entity_description.key] + return value diff --git a/homeassistant/components/romy/strings.json b/homeassistant/components/romy/strings.json index 26dc60a2e84..78721da17ba 100644 --- a/homeassistant/components/romy/strings.json +++ b/homeassistant/components/romy/strings.json @@ -46,6 +46,37 @@ } } } + }, + "binary_sensor": { + "dustbin_present": { + "name": "Dustbin present" + }, + "docked": { + "name": "Robot docked" + }, + "water_tank_present": { + "name": "Watertank present" + }, + "water_tank_empty": { + "name": "Watertank empty" + } + }, + "sensor": { + "dustbin_sensor": { + "name": "Dustbin dirt level" + }, + "total_cleaning_time": { + "name": "Total cleaning time" + }, + "total_number_of_cleaning_runs": { + "name": "Total cleaning runs" + }, + "total_area_cleaned": { + "name": "Total cleaned area" + }, + "total_distance_driven": { + "name": "Total distance driven" + } } } } diff --git a/homeassistant/components/rss_feed_template/__init__.py b/homeassistant/components/rss_feed_template/__init__.py index 8d2e47315ef..debff5a6e96 100644 --- a/homeassistant/components/rss_feed_template/__init__.py +++ b/homeassistant/components/rss_feed_template/__init__.py @@ -91,9 +91,7 @@ class RssView(HomeAssistantView): response += '\n' response += " \n" if self._title is not None: 
- response += " %s\n" % escape( - self._title.async_render(parse_result=False) - ) + response += f" {escape(self._title.async_render(parse_result=False))}\n" else: response += " Home Assistant\n" diff --git a/homeassistant/components/samsungtv/media_player.py b/homeassistant/components/samsungtv/media_player.py index 36715c44a9b..ff347431a4a 100644 --- a/homeassistant/components/samsungtv/media_player.py +++ b/homeassistant/components/samsungtv/media_player.py @@ -46,15 +46,17 @@ from .triggers.turn_on import async_get_turn_on_trigger SOURCES = {"TV": "KEY_TV", "HDMI": "KEY_HDMI"} SUPPORT_SAMSUNGTV = ( - MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.VOLUME_STEP - | MediaPlayerEntityFeature.VOLUME_MUTE - | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.SELECT_SOURCE - | MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.TURN_OFF + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.SELECT_SOURCE + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.VOLUME_STEP ) diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 1d06e1a24c4..a955e861c20 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -747,13 +747,15 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return value - def _suggested_precision_or_none(self) -> int | None: - """Return suggested display precision, or None if not set.""" + def _display_precision_or_none(self) -> int | None: + """Return display precision, or None if not set.""" assert self.registry_entry - if (sensor_options := self.registry_entry.options.get(DOMAIN)) and ( - 
precision := sensor_options.get("suggested_display_precision") - ) is not None: - return cast(int, precision) + if not (sensor_options := self.registry_entry.options.get(DOMAIN)): + return None + + for option in ("display_precision", "suggested_display_precision"): + if (precision := sensor_options.get(option)) is not None: + return cast(int, precision) return None def _update_suggested_precision(self) -> None: @@ -784,11 +786,6 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ratio_log = floor(ratio_log) if ratio_log > 0 else ceil(ratio_log) display_precision = max(0, display_precision + ratio_log) - if display_precision is None and ( - DOMAIN not in self.registry_entry.options - or "suggested_display_precision" not in self.registry_entry.options - ): - return sensor_options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {}) if ( "suggested_display_precision" in sensor_options @@ -835,7 +832,7 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Called when the entity registry entry has been updated and before the sensor is added to the state machine. 
""" - self._sensor_option_display_precision = self._suggested_precision_or_none() + self._sensor_option_display_precision = self._display_precision_or_none() assert self.registry_entry if ( sensor_options := self.registry_entry.options.get(f"{DOMAIN}.private") diff --git a/homeassistant/components/seventeentrack/__init__.py b/homeassistant/components/seventeentrack/__init__.py index 183d1bd4068..40c9c8d58d1 100644 --- a/homeassistant/components/seventeentrack/__init__.py +++ b/homeassistant/components/seventeentrack/__init__.py @@ -4,14 +4,80 @@ from py17track import Client as SeventeenTrackClient from py17track.errors import SeventeenTrackError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import ( + ATTR_FRIENDLY_NAME, + ATTR_LOCATION, + CONF_PASSWORD, + CONF_USERNAME, + Platform, +) +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType +from homeassistant.util import slugify -from .const import DOMAIN +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_INFO_TEXT, + ATTR_PACKAGE_STATE, + ATTR_STATUS, + ATTR_TIMESTAMP, + ATTR_TRACKING_NUMBER, + DOMAIN, + SERVICE_GET_PACKAGES, +) +from .coordinator import SeventeenTrackCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] + +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the 17Track component.""" + + async def get_packages(call: ServiceCall) -> ServiceResponse: + """Get packages from 17Track.""" + config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] + 
package_states = call.data.get(ATTR_PACKAGE_STATE, []) + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + live_packages = sorted( + await seventeen_coordinator.client.profile.packages( + show_archived=seventeen_coordinator.show_archived + ) + ) + + return { + "packages": [ + { + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + for package in live_packages + if slugify(package.status) in package_states or package_states == [] + ] + } + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PACKAGES, + get_packages, + supports_response=SupportsResponse.ONLY, + ) + return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -25,8 +91,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SeventeenTrackError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = client + seventeen_coordinator = SeventeenTrackCoordinator(hass, client) + await seventeen_coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = seventeen_coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index 6f8ae1b221c..39932d31935 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -1,6 +1,9 @@ """Constants for the 17track.net component.""" from datetime import timedelta +import logging + +LOGGER = logging.getLogger(__package__) ATTR_DESTINATION_COUNTRY = "destination_country" ATTR_INFO_TEXT = "info_text" @@ -37,3 +40,8 @@ NOTIFICATION_DELIVERED_MESSAGE = ( ) VALUE_DELIVERED = "Delivered" + 
+SERVICE_GET_PACKAGES = "get_packages" + +ATTR_PACKAGE_STATE = "package_state" +ATTR_CONFIG_ENTRY_ID = "config_entry_id" diff --git a/homeassistant/components/seventeentrack/coordinator.py b/homeassistant/components/seventeentrack/coordinator.py new file mode 100644 index 00000000000..4da4969ed92 --- /dev/null +++ b/homeassistant/components/seventeentrack/coordinator.py @@ -0,0 +1,84 @@ +"""Coordinator for 17Track.""" + +from dataclasses import dataclass +from typing import Any + +from py17track import Client as SeventeenTrackClient +from py17track.errors import SeventeenTrackError +from py17track.package import Package + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import slugify + +from .const import ( + CONF_SHOW_ARCHIVED, + CONF_SHOW_DELIVERED, + DEFAULT_SCAN_INTERVAL, + DOMAIN, + LOGGER, +) + + +@dataclass +class SeventeenTrackData: + """Class for handling the data retrieval.""" + + summary: dict[str, dict[str, Any]] + live_packages: dict[str, Package] + + +class SeventeenTrackCoordinator(DataUpdateCoordinator[SeventeenTrackData]): + """Class to manage fetching 17Track data.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, client: SeventeenTrackClient) -> None: + """Initialize.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=DEFAULT_SCAN_INTERVAL, + ) + self.show_delivered = self.config_entry.options[CONF_SHOW_DELIVERED] + self.account_id = client.profile.account_id + + self.show_archived = self.config_entry.options[CONF_SHOW_ARCHIVED] + self.client = client + + async def _async_update_data(self) -> SeventeenTrackData: + """Fetch data from 17Track API.""" + + try: + summary = await self.client.profile.summary( + show_archived=self.show_archived + ) + + live_packages = set( + await 
self.client.profile.packages(show_archived=self.show_archived) + ) + + except SeventeenTrackError as err: + raise UpdateFailed(err) from err + + summary_dict = {} + live_packages_dict = {} + + for status, quantity in summary.items(): + summary_dict[slugify(status)] = { + "quantity": quantity, + "packages": [], + "status_name": status, + } + + for package in live_packages: + live_packages_dict[package.tracking_number] = package + summary_value = summary_dict.get(slugify(package.status)) + if summary_value: + summary_value["packages"].append(package) + + return SeventeenTrackData( + summary=summary_dict, live_packages=live_packages_dict + ) diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json new file mode 100644 index 00000000000..78ca65edc4d --- /dev/null +++ b/homeassistant/components/seventeentrack/icons.json @@ -0,0 +1,33 @@ +{ + "entity": { + "sensor": { + "not_found": { + "default": "mdi:package" + }, + "in_transit": { + "default": "mdi:package" + }, + "expired": { + "default": "mdi:package" + }, + "ready_to_be_picked_up": { + "default": "mdi:package" + }, + "undelivered": { + "default": "mdi:package" + }, + "delivered": { + "default": "mdi:package" + }, + "returned": { + "default": "mdi:package" + }, + "package": { + "default": "mdi:package" + } + } + }, + "services": { + "get_packages": "mdi:package" + } +} diff --git a/homeassistant/components/seventeentrack/sensor.py b/homeassistant/components/seventeentrack/sensor.py index 1de627fab39..acc8471c030 100644 --- a/homeassistant/components/seventeentrack/sensor.py +++ b/homeassistant/components/seventeentrack/sensor.py @@ -2,10 +2,8 @@ from __future__ import annotations -import logging +from typing import Any -from py17track.errors import SeventeenTrackError -from py17track.package import Package import voluptuous as vol from homeassistant.components import persistent_notification @@ -17,15 +15,16 @@ from homeassistant.const import ( 
CONF_PASSWORD, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_validation as cv, entity, entity_registry as er +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType -from homeassistant.util import Throttle, slugify +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import SeventeenTrackCoordinator from .const import ( ATTR_DESTINATION_COUNTRY, ATTR_INFO_TEXT, @@ -39,17 +38,14 @@ from .const import ( ATTRIBUTION, CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, - DEFAULT_SCAN_INTERVAL, DOMAIN, - ENTITY_ID_TEMPLATE, + LOGGER, NOTIFICATION_DELIVERED_MESSAGE, NOTIFICATION_DELIVERED_TITLE, UNIQUE_ID_TEMPLATE, VALUE_DELIVERED, ) -_LOGGER = logging.getLogger(__name__) - PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_USERNAME): cv.string, @@ -111,81 +107,158 @@ async def async_setup_entry( ) -> None: """Set up a 17Track sensor entry.""" - client = hass.data[DOMAIN][config_entry.entry_id] + coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][config_entry.entry_id] + previous_tracking_numbers: set[str] = set() - data = SeventeenTrackData( - client, - async_add_entities, - DEFAULT_SCAN_INTERVAL, - config_entry.options[CONF_SHOW_ARCHIVED], - config_entry.options[CONF_SHOW_DELIVERED], - str(hass.config.time_zone), + @callback + def _async_create_remove_entities(): + live_tracking_numbers = 
set(coordinator.data.live_packages.keys()) + + new_tracking_numbers = live_tracking_numbers - previous_tracking_numbers + old_tracking_numbers = previous_tracking_numbers - live_tracking_numbers + + previous_tracking_numbers.update(live_tracking_numbers) + + packages_to_add = [ + coordinator.data.live_packages[tracking_number] + for tracking_number in new_tracking_numbers + ] + + for package_data in coordinator.data.live_packages.values(): + if ( + package_data.status == VALUE_DELIVERED + and not coordinator.show_delivered + ): + old_tracking_numbers.add(package_data.tracking_number) + notify_delivered( + hass, + package_data.friendly_name, + package_data.tracking_number, + ) + + remove_packages(hass, coordinator.account_id, old_tracking_numbers) + + async_add_entities( + SeventeenTrackPackageSensor( + coordinator, + package_data.tracking_number, + ) + for package_data in packages_to_add + if not ( + not coordinator.show_delivered and package_data.status == "Delivered" + ) + ) + + async_add_entities( + SeventeenTrackSummarySensor(status, coordinator) + for status, summary_data in coordinator.data.summary.items() + ) + + _async_create_remove_entities() + + config_entry.async_on_unload( + coordinator.async_add_listener(_async_create_remove_entities) ) - await data.async_update() -class SeventeenTrackSummarySensor(SensorEntity): - """Define a summary sensor.""" +class SeventeenTrackSensor(CoordinatorEntity[SeventeenTrackCoordinator], SensorEntity): + """Define a 17Track sensor.""" _attr_attribution = ATTRIBUTION - _attr_icon = "mdi:package" + _attr_has_entity_name = True + + def __init__(self, coordinator: SeventeenTrackCoordinator) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.account_id)}, + entry_type=DeviceEntryType.SERVICE, + name="17Track", + ) + + +class SeventeenTrackSummarySensor(SeventeenTrackSensor): + """Define a summary sensor.""" + 
_attr_native_unit_of_measurement = "packages" - def __init__(self, data, status, initial_state) -> None: - """Initialize.""" - self._attr_extra_state_attributes = {} - self._data = data - self._state = initial_state + def __init__( + self, + status: str, + coordinator: SeventeenTrackCoordinator, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) self._status = status - self._attr_name = f"Seventeentrack Packages {status}" - self._attr_unique_id = f"summary_{data.account_id}_{slugify(status)}" + self._attr_translation_key = status + self._attr_unique_id = f"summary_{coordinator.account_id}_{status}" @property def available(self) -> bool: """Return whether the entity is available.""" - return self._state is not None + return self._status in self.coordinator.data.summary + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.coordinator.data.summary[self._status]["quantity"] + + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + packages = self.coordinator.data.summary[self._status]["packages"] + return { + ATTR_PACKAGES: [ + { + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + for package in packages + ] + } + + +class SeventeenTrackPackageSensor(SeventeenTrackSensor): + """Define an individual package sensor.""" + + _attr_translation_key = "package" + + def __init__( + self, + coordinator: SeventeenTrackCoordinator, + tracking_number: str, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self._tracking_number = tracking_number + self._previous_status = coordinator.data.live_packages[tracking_number].status + self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( + coordinator.account_id, tracking_number + ) + package = 
coordinator.data.live_packages[tracking_number] + if not (name := package.friendly_name): + name = tracking_number + self._attr_translation_placeholders = {"name": name} + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return self._tracking_number in self.coordinator.data.live_packages @property def native_value(self) -> StateType: """Return the state.""" - return self._state + return self.coordinator.data.live_packages[self._tracking_number].status - async def async_update(self) -> None: - """Update the sensor.""" - await self._data.async_update() - - package_data = [] - for package in self._data.packages.values(): - if package.status != self._status: - continue - - package_data.append( - { - ATTR_FRIENDLY_NAME: package.friendly_name, - ATTR_INFO_TEXT: package.info_text, - ATTR_TIMESTAMP: package.timestamp, - ATTR_STATUS: package.status, - ATTR_LOCATION: package.location, - ATTR_TRACKING_NUMBER: package.tracking_number, - } - ) - - self._attr_extra_state_attributes[ATTR_PACKAGES] = ( - package_data if package_data else None - ) - - self._state = self._data.summary.get(self._status) - - -class SeventeenTrackPackageSensor(SensorEntity): - """Define an individual package sensor.""" - - _attr_attribution = ATTRIBUTION - _attr_icon = "mdi:package" - - def __init__(self, data, package) -> None: - """Initialize.""" - self._attr_extra_state_attributes = { + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + package = self.coordinator.data.live_packages[self._tracking_number] + return { ATTR_DESTINATION_COUNTRY: package.destination_country, ATTR_INFO_TEXT: package.info_text, ATTR_TIMESTAMP: package.timestamp, @@ -195,158 +268,30 @@ class SeventeenTrackPackageSensor(SensorEntity): ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, ATTR_TRACKING_NUMBER: package.tracking_number, } - self._data = data - self._friendly_name = package.friendly_name - self._state = 
package.status - self._tracking_number = package.tracking_number - self.entity_id = ENTITY_ID_TEMPLATE.format(self._tracking_number) - self._attr_unique_id = UNIQUE_ID_TEMPLATE.format( - data.account_id, self._tracking_number - ) - @property - def available(self) -> bool: - """Return whether the entity is available.""" - return self._data.packages.get(self._tracking_number) is not None - @property - def name(self) -> str: - """Return the name.""" - if not (name := self._friendly_name): - name = self._tracking_number - return f"Seventeentrack Package: {name}" - - @property - def native_value(self) -> StateType: - """Return the state.""" - return self._state - - async def async_update(self) -> None: - """Update the sensor.""" - await self._data.async_update() - - if not self.available: - # Entity cannot be removed while its being added - async_call_later(self.hass, 1, self._remove) - return - - package = self._data.packages.get(self._tracking_number, None) - - # If the user has elected to not see delivered packages and one gets - # delivered, post a notification: - if package.status == VALUE_DELIVERED and not self._data.show_delivered: - self._notify_delivered() - # Entity cannot be removed while its being added - async_call_later(self.hass, 1, self._remove) - return - - self._attr_extra_state_attributes.update( - { - ATTR_INFO_TEXT: package.info_text, - ATTR_TIMESTAMP: package.timestamp, - ATTR_LOCATION: package.location, - } - ) - self._state = package.status - self._friendly_name = package.friendly_name - - async def _remove(self, *_): - """Remove entity itself.""" - await self.async_remove(force_remove=True) - - reg = er.async_get(self.hass) +def remove_packages(hass: HomeAssistant, account_id: str, packages: set[str]) -> None: + """Remove entity itself.""" + reg = er.async_get(hass) + for package in packages: entity_id = reg.async_get_entity_id( "sensor", "seventeentrack", - UNIQUE_ID_TEMPLATE.format(self._data.account_id, self._tracking_number), + 
UNIQUE_ID_TEMPLATE.format(account_id, package), ) if entity_id: reg.async_remove(entity_id) - def _notify_delivered(self): - """Notify when package is delivered.""" - _LOGGER.info("Package delivered: %s", self._tracking_number) - identification = ( - self._friendly_name if self._friendly_name else self._tracking_number - ) - message = NOTIFICATION_DELIVERED_MESSAGE.format( - identification, self._tracking_number - ) - title = NOTIFICATION_DELIVERED_TITLE.format(identification) - notification_id = NOTIFICATION_DELIVERED_TITLE.format(self._tracking_number) +def notify_delivered(hass: HomeAssistant, friendly_name: str, tracking_number: str): + """Notify when package is delivered.""" + LOGGER.debug("Package delivered: %s", tracking_number) - persistent_notification.create( - self.hass, message, title=title, notification_id=notification_id - ) + identification = friendly_name if friendly_name else tracking_number + message = NOTIFICATION_DELIVERED_MESSAGE.format(identification, tracking_number) + title = NOTIFICATION_DELIVERED_TITLE.format(identification) + notification_id = NOTIFICATION_DELIVERED_TITLE.format(tracking_number) - -class SeventeenTrackData: - """Define a data handler for 17track.net.""" - - def __init__( - self, - client, - async_add_entities, - scan_interval, - show_archived, - show_delivered, - timezone, - ) -> None: - """Initialize.""" - self._async_add_entities = async_add_entities - self._client = client - self._scan_interval = scan_interval - self._show_archived = show_archived - self.account_id = client.profile.account_id - self.packages: dict[str, Package] = {} - self.show_delivered = show_delivered - self.timezone = timezone - self.summary: dict[str, int] = {} - self.async_update = Throttle(self._scan_interval)(self._async_update) - self.first_update = True - - async def _async_update(self): - """Get updated data from 17track.net.""" - entities: list[entity.Entity] = [] - - try: - packages = await self._client.profile.packages( - 
show_archived=self._show_archived, tz=self.timezone - ) - _LOGGER.debug("New package data received: %s", packages) - - new_packages = {p.tracking_number: p for p in packages} - - to_add = set(new_packages) - set(self.packages) - - _LOGGER.debug("Will add new tracking numbers: %s", to_add) - if to_add: - entities.extend( - SeventeenTrackPackageSensor(self, new_packages[tracking_number]) - for tracking_number in to_add - ) - - self.packages = new_packages - except SeventeenTrackError as err: - _LOGGER.error("There was an error retrieving packages: %s", err) - - try: - self.summary = await self._client.profile.summary( - show_archived=self._show_archived - ) - _LOGGER.debug("New summary data received: %s", self.summary) - - # creating summary sensors on first update - if self.first_update: - self.first_update = False - entities.extend( - SeventeenTrackSummarySensor(self, status, quantity) - for status, quantity in self.summary.items() - ) - - except SeventeenTrackError as err: - _LOGGER.error("There was an error retrieving the summary: %s", err) - self.summary = {} - - self._async_add_entities(entities, True) + persistent_notification.create( + hass, message, title=title, notification_id=notification_id + ) diff --git a/homeassistant/components/seventeentrack/services.yaml b/homeassistant/components/seventeentrack/services.yaml new file mode 100644 index 00000000000..41cb66ada5f --- /dev/null +++ b/homeassistant/components/seventeentrack/services.yaml @@ -0,0 +1,20 @@ +get_packages: + fields: + package_state: + selector: + select: + multiple: true + options: + - "not_found" + - "in_transit" + - "expired" + - "ready_to_be_picked_up" + - "undelivered" + - "delivered" + - "returned" + translation_key: package_state + config_entry_id: + required: true + selector: + config_entry: + integration: seventeentrack diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index 39ddb5ef8ef..626af29e856 100644 --- 
a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -38,5 +38,62 @@ "title": "The 17Track YAML configuration import request failed due to invalid authentication", "description": "Configuring 17Track using YAML is being removed but there were invalid credentials provided while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your 17Track credentials are correct and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the 17Track configuration from your YAML configuration entirely, restart Home Assistant, and add the 17Track integration manually." } + }, + "entity": { + "sensor": { + "not_found": { + "name": "Not found" + }, + "in_transit": { + "name": "In transit" + }, + "expired": { + "name": "Expired" + }, + "ready_to_be_picked_up": { + "name": "Ready to be picked up" + }, + "undelivered": { + "name": "Undelivered" + }, + "delivered": { + "name": "Delivered" + }, + "returned": { + "name": "Returned" + }, + "package": { + "name": "Package {name}" + } + } + }, + "services": { + "get_packages": { + "name": "Get packages", + "description": "Get packages from 17Track", + "fields": { + "package_state": { + "name": "Package states", + "description": "Only return packages with the specified states. Returns all packages if not specified." + }, + "config_entry_id": { + "name": "17Track service", + "description": "The packages will be retrieved for the selected service." 
+ } + } + } + }, + "selector": { + "package_state": { + "options": { + "not_found": "[%key:component::seventeentrack::entity::sensor::not_found::name%]", + "in_transit": "[%key:component::seventeentrack::entity::sensor::in_transit::name%]", + "expired": "[%key:component::seventeentrack::entity::sensor::expired::name%]", + "ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]", + "undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]", + "delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]", + "returned": "[%key:component::seventeentrack::entity::sensor::returned::name%]" + } + } } } diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index b368b38820e..81289bc1a9b 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -132,7 +132,11 @@ def async_setup_rpc_entry( climate_ids = [] for id_ in climate_key_ids: climate_ids.append(id_) - + # There are three configuration scenarios for WallDisplay: + # - relay mode (no thermostat) + # - thermostat mode using the internal relay as an actuator + # - thermostat mode using an external (from another device) relay as + # an actuator if is_rpc_thermostat_internal_actuator(coordinator.device.status): # Wall Display relay is used as the thermostat actuator, # we need to remove a switch entity diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 14fec43c58b..81b16d48ab8 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ -43,6 +43,7 @@ from .utils import ( is_block_channel_type_light, is_rpc_channel_type_light, is_rpc_thermostat_internal_actuator, + is_rpc_thermostat_mode, ) @@ -140,12 +141,19 @@ def async_setup_rpc_entry( continue if coordinator.model == MODEL_WALL_DISPLAY: - if not 
is_rpc_thermostat_internal_actuator(coordinator.device.status): - # Wall Display relay is not used as the thermostat actuator, - # we need to remove a climate entity + # There are three configuration scenarios for WallDisplay: + # - relay mode (no thermostat) + # - thermostat mode using the internal relay as an actuator + # - thermostat mode using an external (from another device) relay as + # an actuator + if not is_rpc_thermostat_mode(id_, coordinator.device.status): + # The device is not in thermostat mode, we need to remove a climate + # entity unique_id = f"{coordinator.mac}-thermostat:{id_}" async_remove_shelly_entity(hass, "climate", unique_id) - else: + elif is_rpc_thermostat_internal_actuator(coordinator.device.status): + # The internal relay is an actuator, skip this ID so as not to create + # a switch entity continue switch_ids.append(id_) diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index ce98e0d5c12..b7cb2f1476a 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -500,3 +500,8 @@ def async_remove_shelly_rpc_entities( if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"): LOGGER.debug("Removing entity: %s", entity_id) entity_reg.async_remove(entity_id) + + +def is_rpc_thermostat_mode(ident: int, status: dict[str, Any]) -> bool: + """Return True if 'thermostat:' is present in the status.""" + return f"thermostat:{ident}" in status diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index 8136806cd0b..9bfa11d3293 100644 --- a/homeassistant/components/smartthings/__init__.py +++ b/homeassistant/components/smartthings/__init__.py @@ -28,6 +28,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration 
+from homeassistant.setup import SetupPhases, async_pause_setup from .config_flow import SmartThingsFlowHandler # noqa: F401 from .const import ( @@ -170,7 +171,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) # Setup device broker - broker = DeviceBroker(hass, entry, token, smart_app, devices, scenes) + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): + # DeviceBroker has a side effect of importing platform + # modules when its created. In the future this should be + # refactored to not do this. + broker = await hass.async_add_import_executor_job( + DeviceBroker, hass, entry, token, smart_app, devices, scenes + ) broker.connect() hass.data[DOMAIN][DATA_BROKERS][entry.entry_id] = broker diff --git a/homeassistant/components/solaredge/__init__.py b/homeassistant/components/solaredge/__init__.py index 69e02c1875c..64f76372e91 100644 --- a/homeassistant/components/solaredge/__init__.py +++ b/homeassistant/components/solaredge/__init__.py @@ -4,13 +4,14 @@ from __future__ import annotations import socket -from requests.exceptions import ConnectTimeout, HTTPError -from solaredge import Solaredge +from aiohttp import ClientError +from aiosolaredge import SolarEdge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from .const import CONF_SITE_ID, DATA_API_CLIENT, DOMAIN, LOGGER @@ -22,13 +23,12 @@ PLATFORMS = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up SolarEdge from a config entry.""" - api = Solaredge(entry.data[CONF_API_KEY]) + session = async_get_clientsession(hass) + api = SolarEdge(entry.data[CONF_API_KEY], session) try: - response = await hass.async_add_executor_job( - 
api.get_details, entry.data[CONF_SITE_ID] - ) - except (ConnectTimeout, HTTPError, socket.gaierror) as ex: + response = await api.get_details(entry.data[CONF_SITE_ID]) + except (TimeoutError, ClientError, socket.gaierror) as ex: LOGGER.error("Could not retrieve details from SolarEdge API") raise ConfigEntryNotReady from ex diff --git a/homeassistant/components/solaredge/config_flow.py b/homeassistant/components/solaredge/config_flow.py index b75af866549..6235e22400f 100644 --- a/homeassistant/components/solaredge/config_flow.py +++ b/homeassistant/components/solaredge/config_flow.py @@ -2,15 +2,17 @@ from __future__ import annotations +import socket from typing import Any -from requests.exceptions import ConnectTimeout, HTTPError -import solaredge +from aiohttp import ClientError +import aiosolaredge import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME from homeassistant.core import callback +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util import slugify from .const import CONF_SITE_ID, DEFAULT_NAME, DOMAIN @@ -38,15 +40,16 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN): """Return True if site_id exists in configuration.""" return site_id in self._async_current_site_ids() - def _check_site(self, site_id: str, api_key: str) -> bool: + async def _async_check_site(self, site_id: str, api_key: str) -> bool: """Check if we can connect to the soleredge api service.""" - api = solaredge.Solaredge(api_key) + session = async_get_clientsession(self.hass) + api = aiosolaredge.SolarEdge(api_key, session) try: - response = api.get_details(site_id) + response = await api.get_details(site_id) if response["details"]["status"].lower() != "active": self._errors[CONF_SITE_ID] = "site_not_active" return False - except (ConnectTimeout, HTTPError): + except (TimeoutError, ClientError, socket.gaierror): self._errors[CONF_SITE_ID] = 
"could_not_connect" return False except KeyError: @@ -66,9 +69,7 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN): else: site = user_input[CONF_SITE_ID] api = user_input[CONF_API_KEY] - can_connect = await self.hass.async_add_executor_job( - self._check_site, site, api - ) + can_connect = await self._async_check_site(site, api) if can_connect: return self.async_create_entry( title=name, data={CONF_SITE_ID: site, CONF_API_KEY: api} diff --git a/homeassistant/components/solaredge/coordinator.py b/homeassistant/components/solaredge/coordinator.py index d2da99820d7..0c264c1c514 100644 --- a/homeassistant/components/solaredge/coordinator.py +++ b/homeassistant/components/solaredge/coordinator.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod from datetime import date, datetime, timedelta from typing import Any -from solaredge import Solaredge +from aiosolaredge import SolarEdge from stringcase import snakecase from homeassistant.core import HomeAssistant, callback @@ -27,7 +27,7 @@ class SolarEdgeDataService(ABC): coordinator: DataUpdateCoordinator[None] - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the data object.""" self.api = api self.site_id = site_id @@ -54,12 +54,8 @@ class SolarEdgeDataService(ABC): """Update interval.""" @abstractmethod - def update(self) -> None: - """Update data in executor.""" - async def async_update_data(self) -> None: """Update data.""" - await self.hass.async_add_executor_job(self.update) class SolarEdgeOverviewDataService(SolarEdgeDataService): @@ -70,10 +66,10 @@ class SolarEdgeOverviewDataService(SolarEdgeDataService): """Update interval.""" return OVERVIEW_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_overview(self.site_id) + data = await 
self.api.get_overview(self.site_id) overview = data["overview"] except KeyError as ex: raise UpdateFailed("Missing overview data, skipping update") from ex @@ -113,11 +109,11 @@ class SolarEdgeDetailsDataService(SolarEdgeDataService): """Update interval.""" return DETAILS_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_details(self.site_id) + data = await self.api.get_details(self.site_id) details = data["details"] except KeyError as ex: raise UpdateFailed("Missing details data, skipping update") from ex @@ -157,10 +153,10 @@ class SolarEdgeInventoryDataService(SolarEdgeDataService): """Update interval.""" return INVENTORY_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_inventory(self.site_id) + data = await self.api.get_inventory(self.site_id) inventory = data["Inventory"] except KeyError as ex: raise UpdateFailed("Missing inventory data, skipping update") from ex @@ -178,7 +174,7 @@ class SolarEdgeInventoryDataService(SolarEdgeDataService): class SolarEdgeEnergyDetailsService(SolarEdgeDataService): """Get and update the latest power flow data.""" - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the power flow data service.""" super().__init__(hass, api, site_id) @@ -189,17 +185,16 @@ class SolarEdgeEnergyDetailsService(SolarEdgeDataService): """Update interval.""" return ENERGY_DETAILS_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: now = datetime.now() today = date.today() midnight = datetime.combine(today, datetime.min.time()) - data = self.api.get_energy_details( + data = await 
self.api.get_energy_details( self.site_id, midnight, - now.strftime("%Y-%m-%d %H:%M:%S"), - meters=None, + now, time_unit="DAY", ) energy_details = data["energyDetails"] @@ -239,7 +234,7 @@ class SolarEdgeEnergyDetailsService(SolarEdgeDataService): class SolarEdgePowerFlowDataService(SolarEdgeDataService): """Get and update the latest power flow data.""" - def __init__(self, hass: HomeAssistant, api: Solaredge, site_id: str) -> None: + def __init__(self, hass: HomeAssistant, api: SolarEdge, site_id: str) -> None: """Initialize the power flow data service.""" super().__init__(hass, api, site_id) @@ -250,10 +245,10 @@ class SolarEdgePowerFlowDataService(SolarEdgeDataService): """Update interval.""" return POWER_FLOW_UPDATE_DELAY - def update(self) -> None: + async def async_update_data(self) -> None: """Update the data from the SolarEdge Monitoring API.""" try: - data = self.api.get_current_power_flow(self.site_id) + data = await self.api.get_current_power_flow(self.site_id) power_flow = data["siteCurrentPowerFlow"] except KeyError as ex: raise UpdateFailed("Missing power flow data, skipping update") from ex diff --git a/homeassistant/components/solaredge/manifest.json b/homeassistant/components/solaredge/manifest.json index 22759b1be7c..02f96c0211f 100644 --- a/homeassistant/components/solaredge/manifest.json +++ b/homeassistant/components/solaredge/manifest.json @@ -1,7 +1,7 @@ { "domain": "solaredge", "name": "SolarEdge", - "codeowners": ["@frenck"], + "codeowners": ["@frenck", "@bdraco"], "config_flow": true, "dhcp": [ { @@ -12,6 +12,6 @@ "documentation": "https://www.home-assistant.io/integrations/solaredge", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["solaredge"], - "requirements": ["solaredge==0.0.2", "stringcase==1.2.0"] + "loggers": ["aiosolaredge"], + "requirements": ["aiosolaredge==0.2.0", "stringcase==1.2.0"] } diff --git a/homeassistant/components/solaredge/sensor.py b/homeassistant/components/solaredge/sensor.py index 
5ec65a3b9a5..b3345d5dc86 100644 --- a/homeassistant/components/solaredge/sensor.py +++ b/homeassistant/components/solaredge/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import dataclass from typing import Any -from solaredge import Solaredge +from aiosolaredge import SolarEdge from homeassistant.components.sensor import ( SensorDeviceClass, @@ -205,7 +205,7 @@ async def async_setup_entry( ) -> None: """Add an solarEdge entry.""" # Add the needed sensors to hass - api: Solaredge = hass.data[DOMAIN][entry.entry_id][DATA_API_CLIENT] + api: SolarEdge = hass.data[DOMAIN][entry.entry_id][DATA_API_CLIENT] sensor_factory = SolarEdgeSensorFactory(hass, entry.data[CONF_SITE_ID], api) for service in sensor_factory.all_services: @@ -223,7 +223,7 @@ async def async_setup_entry( class SolarEdgeSensorFactory: """Factory which creates sensors based on the sensor_key.""" - def __init__(self, hass: HomeAssistant, site_id: str, api: Solaredge) -> None: + def __init__(self, hass: HomeAssistant, site_id: str, api: SolarEdge) -> None: """Initialize the factory.""" details = SolarEdgeDetailsDataService(hass, api, site_id) diff --git a/homeassistant/components/sonos/media_browser.py b/homeassistant/components/sonos/media_browser.py index b6fc250ab23..eeadd7db232 100644 --- a/homeassistant/components/sonos/media_browser.py +++ b/homeassistant/components/sonos/media_browser.py @@ -199,9 +199,15 @@ def build_item_response( payload["search_type"] == MediaType.ALBUM and media[0].item_class == "object.item.audioItem.musicTrack" ): - item = get_media(media_library, payload["idstring"], SONOS_ALBUM_ARTIST) + idstring = payload["idstring"] + if idstring.startswith("A:ALBUMARTIST/"): + search_type = SONOS_ALBUM_ARTIST + elif idstring.startswith("A:ALBUM/"): + search_type = SONOS_ALBUM + item = get_media(media_library, idstring, search_type) + title = getattr(item, "title", None) - thumbnail = get_thumbnail_url(SONOS_ALBUM_ARTIST, payload["idstring"]) + thumbnail = 
get_thumbnail_url(search_type, payload["idstring"]) if not title: try: @@ -493,8 +499,9 @@ def get_content_id(item: DidlObject) -> str: def get_media( media_library: MusicLibrary, item_id: str, search_type: str -) -> MusicServiceItem: - """Fetch media/album.""" +) -> MusicServiceItem | None: + """Fetch a single media/album.""" + _LOGGER.debug("get_media item_id [%s], search_type [%s]", item_id, search_type) search_type = MEDIA_TYPES_TO_SONOS.get(search_type, search_type) if search_type == "playlists": @@ -513,9 +520,38 @@ def get_media( if not item_id.startswith("A:ALBUM") and search_type == SONOS_ALBUM: item_id = "A:ALBUMARTIST/" + "/".join(item_id.split("/")[2:]) - search_term = urllib.parse.unquote(item_id.split("/")[-1]) - matches = media_library.get_music_library_information( - search_type, search_term=search_term, full_album_art_uri=True + if item_id.startswith("A:ALBUM/") or search_type == "tracks": + search_term = urllib.parse.unquote(item_id.split("/")[-1]) + matches = media_library.get_music_library_information( + search_type, search_term=search_term, full_album_art_uri=True + ) + else: + # When requesting media by album_artist, composer, genre use the browse interface + # to navigate the hierarchy. 
This occurs when invoked from media browser or service + # calls + # Example: A:ALBUMARTIST/Neil Young/Greatest Hits - get specific album + # Example: A:ALBUMARTIST/Neil Young - get all albums + # Others: composer, genre + # A:// + splits = item_id.split("/") + title = urllib.parse.unquote(splits[2]) if len(splits) > 2 else None + browse_id_string = splits[0] + "/" + splits[1] + matches = media_library.browse_by_idstring( + search_type, browse_id_string, full_album_art_uri=True + ) + if title: + result = next( + (item for item in matches if (title == item.title)), + None, + ) + matches = [result] + + _LOGGER.debug( + "get_media search_type [%s] item_id [%s] matches [%d]", + search_type, + item_id, + len(matches), ) if len(matches) > 0: return matches[0] + return None diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 581bdaad37d..35c6be3fa6b 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -7,7 +7,7 @@ from functools import partial import logging from typing import Any -from soco import alarms +from soco import SoCo, alarms from soco.core import ( MUSIC_SRC_LINE_IN, MUSIC_SRC_RADIO, @@ -15,6 +15,7 @@ from soco.core import ( PLAY_MODES, ) from soco.data_structures import DidlFavorite +from soco.ms_data_structures import MusicServiceItem from sonos_websocket.exception import SonosWebsocketError import voluptuous as vol @@ -549,6 +550,7 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, is_radio: bool, **kwargs: Any ) -> None: """Wrap sync calls to async_play_media.""" + _LOGGER.debug("_play_media media_type %s media_id %s", media_type, media_id) enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE, MediaPlayerEnqueue.REPLACE) if media_type == "favorite_item_id": @@ -645,10 +647,35 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): _LOGGER.error('Could not find "%s" in the 
library', media_id) return - soco.play_uri(item.get_uri()) + self._play_media_queue(soco, item, enqueue) else: _LOGGER.error('Sonos does not support a media type of "%s"', media_type) + def _play_media_queue( + self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue + ): + """Manage adding, replacing, playing items onto the sonos queue.""" + _LOGGER.debug( + "_play_media_queue item_id [%s] title [%s] enqueue [%s]", + item.item_id, + item.title, + enqueue, + ) + if enqueue == MediaPlayerEnqueue.REPLACE: + soco.clear_queue() + + if enqueue in (MediaPlayerEnqueue.ADD, MediaPlayerEnqueue.REPLACE): + soco.add_to_queue(item, timeout=LONG_SERVICE_TIMEOUT) + if enqueue == MediaPlayerEnqueue.REPLACE: + soco.play_from_queue(0) + else: + pos = (self.media.queue_position or 0) + 1 + new_pos = soco.add_to_queue( + item, position=pos, timeout=LONG_SERVICE_TIMEOUT + ) + if enqueue == MediaPlayerEnqueue.PLAY: + soco.play_from_queue(new_pos - 1) + @soco_error() def set_sleep_timer(self, sleep_time: int) -> None: """Set the timer on the player.""" diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 7d072fa2570..a3a404fe1ae 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -28,7 +28,6 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, CONF_USERNAME, - EVENT_HOMEASSISTANT_START, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -44,6 +43,7 @@ from homeassistant.helpers.dispatcher import ( ) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.start import async_at_start from homeassistant.util.dt import utcnow from .browse_media import ( @@ -207,12 +207,7 @@ async def async_setup_entry( platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # 
Start server discovery task if not already running - if hass.is_running: - hass.async_create_task(start_server_discovery(hass)) - else: - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_START, start_server_discovery(hass) - ) + config_entry.async_on_unload(async_at_start(hass, start_server_discovery)) class SqueezeBoxEntity(MediaPlayerEntity): diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 670d6b93c0e..956c93d01a0 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -592,7 +592,7 @@ def stream_worker( except av.AVError as ex: container.close() raise StreamWorkerError( - "Error demuxing stream while finding first packet: %s" % str(ex) + f"Error demuxing stream while finding first packet: {str(ex)}" ) from ex muxer = StreamMuxer( @@ -617,7 +617,7 @@ def stream_worker( except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex except av.AVError as ex: - raise StreamWorkerError("Error demuxing stream: %s" % str(ex)) from ex + raise StreamWorkerError(f"Error demuxing stream: {str(ex)}") from ex muxer.mux_packet(packet) diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index ec13ec929a5..2748b27c93d 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -161,6 +161,8 @@ async def async_remove_config_entry_device( return not device_entry.identifiers.intersection( ( (DOMAIN, serial), # Base device - *((DOMAIN, f"{serial}_{id}") for id in device_ids), # Storage and cameras + *( + (DOMAIN, f"{serial}_{device_id}") for device_id in device_ids + ), # Storage and cameras ) ) diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 5ab7a6f67b8..8f69ccdaffb 100644 --- a/homeassistant/components/tado/__init__.py +++ 
b/homeassistant/components/tado/__init__.py @@ -221,7 +221,7 @@ class TadoConnector: # Errors are planned to be converted to exceptions # in PyTado library, so this can be removed - if "errors" in mobile_devices and mobile_devices["errors"]: + if isinstance(mobile_devices, dict) and mobile_devices.get("errors"): _LOGGER.error( "Error for home ID %s while updating mobile devices: %s", self.home_id, @@ -256,7 +256,7 @@ class TadoConnector: # Errors are planned to be converted to exceptions # in PyTado library, so this can be removed - if "errors" in devices and devices["errors"]: + if isinstance(devices, dict) and devices.get("errors"): _LOGGER.error( "Error for home ID %s while updating devices: %s", self.home_id, diff --git a/homeassistant/components/tankerkoenig/manifest.json b/homeassistant/components/tankerkoenig/manifest.json index 4570d0e5649..c754094655d 100644 --- a/homeassistant/components/tankerkoenig/manifest.json +++ b/homeassistant/components/tankerkoenig/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/tankerkoenig", "iot_class": "cloud_polling", "loggers": ["aiotankerkoenig"], + "quality_scale": "platinum", "requirements": ["aiotankerkoenig==0.4.1"] } diff --git a/homeassistant/components/tankerkoenig/sensor.py b/homeassistant/components/tankerkoenig/sensor.py index f2fdc2c45b7..33476e75262 100644 --- a/homeassistant/components/tankerkoenig/sensor.py +++ b/homeassistant/components/tankerkoenig/sensor.py @@ -91,7 +91,7 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): self._fuel_type = fuel_type self._attr_translation_key = fuel_type self._attr_unique_id = f"{station.id}_{fuel_type}" - attrs = { + attrs: dict[str, int | str | float | None] = { ATTR_BRAND: station.brand, ATTR_FUEL_TYPE: fuel_type, ATTR_STATION_NAME: station.name, @@ -102,8 +102,8 @@ class FuelPriceSensor(TankerkoenigCoordinatorEntity, SensorEntity): } if coordinator.show_on_map: - attrs[ATTR_LATITUDE] = str(station.lat) - 
attrs[ATTR_LONGITUDE] = str(station.lng) + attrs[ATTR_LATITUDE] = station.lat + attrs[ATTR_LONGITUDE] = station.lng self._attr_extra_state_attributes = attrs @property diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index f3043b1d78d..069a7893974 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -100,9 +100,9 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): except TedeeDataUpdateException as ex: _LOGGER.debug("Error while updating data: %s", str(ex)) - raise UpdateFailed("Error while updating data: %s" % str(ex)) from ex + raise UpdateFailed(f"Error while updating data: {str(ex)}") from ex except (TedeeClientException, TimeoutError) as ex: - raise UpdateFailed("Querying API failed. Error: %s" % str(ex)) from ex + raise UpdateFailed(f"Querying API failed. Error: {str(ex)}") from ex def _async_add_remove_locks(self) -> None: """Add new locks, remove non-existing locks.""" diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index a720652bcbc..1c47ff2a6c1 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -90,7 +90,7 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to unlock the door. Lock %s" % self._lock.lock_id + f"Failed to unlock the door. Lock {self._lock.lock_id}" ) from ex async def async_lock(self, **kwargs: Any) -> None: @@ -103,7 +103,7 @@ class TedeeLockEntity(TedeeEntity, LockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to lock the door. Lock %s" % self._lock.lock_id + f"Failed to lock the door. 
Lock {self._lock.lock_id}" ) from ex @@ -125,5 +125,5 @@ class TedeeLockWithLatchEntity(TedeeLockEntity): await self.coordinator.async_request_refresh() except (TedeeClientException, Exception) as ex: raise HomeAssistantError( - "Failed to unlatch the door. Lock %s" % self._lock.lock_id + f"Failed to unlatch the door. Lock {self._lock.lock_id}" ) from ex diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 897fd6a9bac..f672ae1547f 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -122,6 +122,7 @@ EVENT_TELEGRAM_SENT = "telegram_sent" PARSER_HTML = "html" PARSER_MD = "markdown" PARSER_MD2 = "markdownv2" +PARSER_PLAIN_TEXT = "plain_text" DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4.0/22")] @@ -524,6 +525,7 @@ class TelegramNotificationService: PARSER_HTML: ParseMode.HTML, PARSER_MD: ParseMode.MARKDOWN, PARSER_MD2: ParseMode.MARKDOWN_V2, + PARSER_PLAIN_TEXT: None, } self._parse_mode = self._parsers.get(parser) self.bot = bot diff --git a/homeassistant/components/telegram_bot/services.yaml b/homeassistant/components/telegram_bot/services.yaml index 1587f754508..d2195c1d6ce 100644 --- a/homeassistant/components/telegram_bot/services.yaml +++ b/homeassistant/components/telegram_bot/services.yaml @@ -22,6 +22,7 @@ send_message: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -94,6 +95,7 @@ send_photo: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -229,6 +231,7 @@ send_animation: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -300,6 +303,7 @@ send_video: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_notification: selector: boolean: @@ -435,6 +439,7 @@ send_document: - "html" - "markdown" - "markdownv2" + - "plain_text" 
disable_notification: selector: boolean: @@ -587,6 +592,7 @@ edit_message: - "html" - "markdown" - "markdownv2" + - "plain_text" disable_web_page_preview: selector: boolean: diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index 084d51ff31b..45fd1eee327 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -4,6 +4,7 @@ import asyncio from typing import Final from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific +from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, @@ -37,6 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: access_token=access_token, ) try: + scopes = (await teslemetry.metadata())["scopes"] products = (await teslemetry.products())["response"] except InvalidToken as e: raise ConfigEntryAuthFailed from e @@ -49,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: vehicles: list[TeslemetryVehicleData] = [] energysites: list[TeslemetryEnergyData] = [] for product in products: - if "vin" in product: + if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: vin = product["vin"] api = VehicleSpecific(teslemetry.vehicle, vin) coordinator = TeslemetryVehicleDataCoordinator(hass, api) @@ -60,7 +62,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: vin=vin, ) ) - elif "energy_site_id" in product: + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] api = EnergySpecific(teslemetry.energy, site_id) energysites.append( @@ -86,7 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Setup Platforms hass.data.setdefault(DOMAIN, {})[entry.entry_id] = TeslemetryData( - vehicles, energysites + vehicles, energysites, scopes ) await 
hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 0835785d194..4c1c05570ab 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from tesla_fleet_api.const import Scope + from homeassistant.components.climate import ( ClimateEntity, ClimateEntityFeature, @@ -17,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, TeslemetryClimateSide from .context import handle_command from .entity import TeslemetryVehicleEntity +from .models import TeslemetryVehicleData async def async_setup_entry( @@ -26,7 +29,7 @@ async def async_setup_entry( data = hass.data[DOMAIN][entry.entry_id] async_add_entities( - TeslemetryClimateEntity(vehicle, TeslemetryClimateSide.DRIVER) + TeslemetryClimateEntity(vehicle, TeslemetryClimateSide.DRIVER, data.scopes) for vehicle in data.vehicles ) @@ -48,6 +51,22 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): _attr_preset_modes = ["off", "keep", "dog", "camp"] _enable_turn_on_off_backwards_compatibility = False + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: Scope, + ) -> None: + """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + super().__init__( + data, + side, + ) + @property def hvac_mode(self) -> HVACMode | None: """Return hvac operation ie. 
heat, cool mode.""" @@ -82,6 +101,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_on(self) -> None: """Set the climate state to on.""" + self.raise_for_scope() with handle_command(): await self.wake_up_if_asleep() await self.api.auto_conditioning_start() @@ -89,6 +109,7 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" + self.raise_for_scope() with handle_command(): await self.wake_up_if_asleep() await self.api.auto_conditioning_stop() diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index eda3d26f341..d67a1bd1770 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -5,7 +5,7 @@ from typing import Any from tesla_fleet_api.exceptions import TeslaFleetError -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -83,6 +83,11 @@ class TeslemetryVehicleEntity(CoordinatorEntity[TeslemetryVehicleDataCoordinator self.coordinator.data[key] = value self.async_write_ha_state() + def raise_for_scope(self): + """Raise an error if a scope is not available.""" + if not self.scoped: + raise ServiceValidationError("Missing required scope") + class TeslemetryEnergyEntity(CoordinatorEntity[TeslemetryEnergyDataCoordinator]): """Parent class for Teslemetry Energy Entities.""" diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index d6f15e2e932..615156e6fdc 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -6,6 +6,7 @@ import asyncio from dataclasses import dataclass from tesla_fleet_api 
import EnergySpecific, VehicleSpecific +from tesla_fleet_api.const import Scope from .coordinator import ( TeslemetryEnergyDataCoordinator, @@ -19,6 +20,7 @@ class TeslemetryData: vehicles: list[TeslemetryVehicleData] energysites: list[TeslemetryEnergyData] + scopes: list[Scope] @dataclass diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index cced1090e2a..6380a4d0c71 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -58,7 +58,7 @@ SHIFT_STATES = {"P": "p", "D": "d", "R": "r", "N": "n"} class TeslemetrySensorEntityDescription(SensorEntityDescription): """Describes Teslemetry Sensor entity.""" - value_fn: Callable[[StateType], StateType | datetime] = lambda x: x + value_fn: Callable[[StateType], StateType] = lambda x: x VEHICLE_DESCRIPTIONS: tuple[TeslemetrySensorEntityDescription, ...] = ( @@ -447,12 +447,13 @@ class TeslemetryVehicleSensorEntity(TeslemetryVehicleEntity, SensorEntity): description: TeslemetrySensorEntityDescription, ) -> None: """Initialize the sensor.""" + self.entity_description = description super().__init__(vehicle, description.key) @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self._value + return self.entity_description.value_fn(self._value) class TeslemetryVehicleTimeSensorEntity(TeslemetryVehicleEntity, SensorEntity): diff --git a/homeassistant/components/tessie/lock.py b/homeassistant/components/tessie/lock.py index 09402055ee8..1e5653744fb 100644 --- a/homeassistant/components/tessie/lock.py +++ b/homeassistant/components/tessie/lock.py @@ -12,10 +12,14 @@ from tessie_api import ( unlock, ) +from homeassistant.components.automation import automations_with_entity from homeassistant.components.lock import ATTR_CODE, LockEntity +from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry +from homeassistant.const 
import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, TessieChargeCableLockStates @@ -29,11 +33,46 @@ async def async_setup_entry( """Set up the Tessie sensor platform from a config entry.""" data = hass.data[DOMAIN][entry.entry_id] - async_add_entities( + entities = [ klass(vehicle.state_coordinator) - for klass in (TessieLockEntity, TessieCableLockEntity, TessieSpeedLimitEntity) + for klass in (TessieLockEntity, TessieCableLockEntity) for vehicle in data - ) + ] + + ent_reg = er.async_get(hass) + + for vehicle in data: + entity_id = ent_reg.async_get_entity_id( + Platform.LOCK, + DOMAIN, + f"{vehicle.state_coordinator.vin}-vehicle_state_speed_limit_mode_active", + ) + if entity_id: + entity_entry = ent_reg.async_get(entity_id) + assert entity_entry + if entity_entry.disabled: + ent_reg.async_remove(entity_id) + else: + entities.append(TessieSpeedLimitEntity(vehicle.state_coordinator)) + + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + for item in entity_automations + entity_scripts: + ir.async_create_issue( + hass, + DOMAIN, + f"deprecated_speed_limit_{entity_id}_{item}", + breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_entity", + translation_placeholders={ + "entity": entity_id, + "info": item, + }, + ) + async_add_entities(entities) class TessieLockEntity(TessieEntity, LockEntity): @@ -81,6 +120,16 @@ class TessieSpeedLimitEntity(TessieEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Enable speed limit with pin.""" + ir.async_create_issue( + self.coordinator.hass, + DOMAIN, + "deprecated_speed_limit_locked", + 
breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_locked", + ) code: str | None = kwargs.get(ATTR_CODE) if code: await self.run(enable_speed_limit, pin=code) @@ -88,6 +137,16 @@ class TessieSpeedLimitEntity(TessieEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Disable speed limit with pin.""" + ir.async_create_issue( + self.coordinator.hass, + DOMAIN, + "deprecated_speed_limit_unlocked", + breaks_in_ha_version="2024.11.0", + is_fixable=True, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_speed_limit_unlocked", + ) code: str | None = kwargs.get(ATTR_CODE) if code: await self.run(disable_speed_limit, pin=code) diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 8e1e47f934f..ea75660ddb7 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -410,5 +410,40 @@ "no_cable": { "message": "Insert cable to lock" } + }, + "issues": { + "deprecated_speed_limit_entity": { + "title": "Detected Tessie speed limit lock entity usage", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_entity::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be removed in 2024.11.0.\nHome Assistant detected that entity `{entity}` is being used in `{info}`\n\nYou should remove the speed limit lock entity from `{info}` then click submit to fix this issue." 
+ } + } + } + }, + "deprecated_speed_limit_locked": { + "title": "Detected Tessie speed limit lock entity locked", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_locked::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be removed in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." + } + } + } + }, + "deprecated_speed_limit_unlocked": { + "title": "Detected Tessie speed limit lock entity unlocked", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::tessie::issues::deprecated_speed_limit_unlocked::title%]", + "description": "The Tessie integration's speed limit lock entity has been deprecated and will be removed in 2024.11.0.\n\nPlease remove this entity from any automation or script, disable the entity then click submit to fix this issue." + } + } + } + } } } diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index da2fd881a54..7da0a2b7947 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -53,6 +53,8 @@ from homeassistant.util import Throttle, dt as dt_util from .const import DOMAIN as TIBBER_DOMAIN, MANUFACTURER +FIVE_YEARS = 5 * 365 * 24 + _LOGGER = logging.getLogger(__name__) ICON = "mdi:currency-usd" @@ -724,9 +726,16 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): # pylint: disable=has None, {"sum"}, ) - first_stat = stat[statistic_id][0] - _sum = cast(float, first_stat["sum"]) - last_stats_time = first_stat["start"] + if statistic_id in stat: + first_stat = stat[statistic_id][0] + _sum = cast(float, first_stat["sum"]) + last_stats_time = first_stat["start"] + else: + hourly_data = await home.get_historic_data( + FIVE_YEARS, production=is_production + ) + _sum = 0.0 + last_stats_time = None statistics = [] diff --git 
a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 436e3198650..1de9db1d319 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -4,9 +4,12 @@ from __future__ import annotations from total_connect_client import ArmingHelper from total_connect_client.exceptions import BadResultCodeError, UsercodeInvalid +from total_connect_client.location import TotalConnectLocation -import homeassistant.components.alarm_control_panel as alarm -from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntity, + AlarmControlPanelEntityFeature, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( STATE_ALARM_ARMED_AWAY, @@ -21,12 +24,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_platform -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import TotalConnectDataUpdateCoordinator from .const import DOMAIN +from .entity import TotalConnectLocationEntity SERVICE_ALARM_ARM_AWAY_INSTANT = "arm_away_instant" SERVICE_ALARM_ARM_HOME_INSTANT = "arm_home_instant" @@ -36,23 +38,17 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect alarm panels based on a config entry.""" - alarms: list[TotalConnectAlarm] = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - for location_id, location in coordinator.client.locations.items(): - location_name = location.location_name - alarms.extend( - TotalConnectAlarm( - coordinator=coordinator, - name=location_name, - location_id=location_id, - partition_id=partition_id, - ) - for partition_id in location.partitions + async_add_entities( + TotalConnectAlarm( + coordinator, + location, + partition_id, ) - - async_add_entities(alarms) + for location in coordinator.client.locations.values() + for partition_id in location.partitions + ) # Set up services platform = entity_platform.async_get_current_platform() @@ -70,10 +66,8 @@ async def async_setup_entry( ) -class TotalConnectAlarm( - CoordinatorEntity[TotalConnectDataUpdateCoordinator], alarm.AlarmControlPanelEntity -): - """Represent an TotalConnect status.""" +class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): + """Represent a TotalConnect alarm panel.""" _attr_supported_features = ( AlarmControlPanelEntityFeature.ARM_HOME @@ -84,19 +78,13 @@ class TotalConnectAlarm( def __init__( self, coordinator: TotalConnectDataUpdateCoordinator, - name, - location_id, - partition_id, + location: TotalConnectLocation, + partition_id: int, ) -> None: """Initialize the TotalConnect status.""" - super().__init__(coordinator) - self._location_id = location_id - self._location = coordinator.client.locations[location_id] + super().__init__(coordinator, location) self._partition_id = 
partition_id self._partition = self._location.partitions[partition_id] - self._device = self._location.devices[self._location.security_device_id] - self._state: str | None = None - self._attr_extra_state_attributes = {} """ Set unique_id to location_id for partition 1 to avoid breaking change @@ -104,27 +92,18 @@ class TotalConnectAlarm( Add _# for partition 2 and beyond. """ if partition_id == 1: - self._attr_name = name - self._attr_unique_id = f"{location_id}" + self._attr_name = None + self._attr_unique_id = str(location.location_id) else: - self._attr_name = f"{name} partition {partition_id}" - self._attr_unique_id = f"{location_id}_{partition_id}" - - @property - def device_info(self) -> DeviceInfo: - """Return device info.""" - return DeviceInfo( - identifiers={(DOMAIN, self._device.serial_number)}, - name=self._device.name, - serial_number=self._device.serial_number, - ) + self._attr_translation_key = "partition" + self._attr_translation_placeholders = {"partition_id": str(partition_id)} + self._attr_unique_id = f"{location.location_id}_{partition_id}" @property def state(self) -> str | None: """Return the state of the device.""" attr = { - "location_name": self.name, - "location_id": self._location_id, + "location_id": self._location.location_id, "partition": self._partition_id, "ac_loss": self._location.ac_loss, "low_battery": self._location.low_battery, @@ -133,6 +112,11 @@ class TotalConnectAlarm( "triggered_zone": None, } + if self._partition_id == 1: + attr["location_name"] = self.device.name + else: + attr["location_name"] = f"{self.device.name} partition {self._partition_id}" + state: str | None = None if self._partition.arming_state.is_disarmed(): state = STATE_ALARM_DISARMED @@ -158,10 +142,9 @@ class TotalConnectAlarm( state = STATE_ALARM_TRIGGERED attr["triggered_source"] = "Carbon Monoxide" - self._state = state self._attr_extra_state_attributes = attr - return self._state + return state async def async_alarm_disarm(self, code: str | None = 
None) -> None: """Send disarm command.""" @@ -174,7 +157,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to disarm {self.name}." + f"TotalConnect failed to disarm {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -193,7 +176,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home {self.name}." + f"TotalConnect failed to arm home {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -212,7 +195,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away {self.name}." + f"TotalConnect failed to arm away {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -231,7 +214,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm night {self.name}." + f"TotalConnect failed to arm night {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -250,7 +233,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm home instant {self.name}." + f"TotalConnect failed to arm home instant {self.device.name}." ) from error await self.coordinator.async_request_refresh() @@ -269,7 +252,7 @@ class TotalConnectAlarm( ) from error except BadResultCodeError as error: raise HomeAssistantError( - f"TotalConnect failed to arm away instant {self.name}." + f"TotalConnect failed to arm away instant {self.device.name}." 
) from error await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 6043d15d2d4..85461805124 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -1,7 +1,12 @@ """Interfaces with TotalConnect sensors.""" +from collections.abc import Callable +from dataclasses import dataclass import logging +from total_connect_client.location import TotalConnectLocation +from total_connect_client.zone import TotalConnectZone + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -10,10 +15,11 @@ from homeassistant.components.binary_sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import TotalConnectDataUpdateCoordinator from .const import DOMAIN +from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity LOW_BATTERY = "low_battery" TAMPER = "tamper" @@ -23,172 +29,172 @@ ZONE = "zone" _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class TotalConnectZoneBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes TotalConnect binary sensor entity.""" + + device_class_fn: Callable[[TotalConnectZone], BinarySensorDeviceClass] | None = None + is_on_fn: Callable[[TotalConnectZone], bool] + + +def get_security_zone_device_class(zone: TotalConnectZone) -> BinarySensorDeviceClass: + """Return the device class of a TotalConnect security zone.""" + if zone.is_type_fire(): + return BinarySensorDeviceClass.SMOKE + if zone.is_type_carbon_monoxide(): + return BinarySensorDeviceClass.GAS + if zone.is_type_motion(): + return BinarySensorDeviceClass.MOTION + if zone.is_type_medical(): + return BinarySensorDeviceClass.SAFETY + if zone.is_type_temperature(): + return BinarySensorDeviceClass.PROBLEM + return BinarySensorDeviceClass.DOOR + + +SECURITY_BINARY_SENSOR = TotalConnectZoneBinarySensorEntityDescription( + key=ZONE, + name=None, + device_class_fn=get_security_zone_device_class, + is_on_fn=lambda zone: zone.is_faulted() or zone.is_triggered(), +) + +NO_BUTTON_BINARY_SENSORS: tuple[TotalConnectZoneBinarySensorEntityDescription, ...] 
= ( + TotalConnectZoneBinarySensorEntityDescription( + key=LOW_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda zone: zone.is_low_battery(), + ), + TotalConnectZoneBinarySensorEntityDescription( + key=TAMPER, + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda zone: zone.is_tampered(), + ), +) + + +@dataclass(frozen=True, kw_only=True) +class TotalConnectAlarmBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes TotalConnect binary sensor entity.""" + + is_on_fn: Callable[[TotalConnectLocation], bool] + + +LOCATION_BINARY_SENSORS: tuple[TotalConnectAlarmBinarySensorEntityDescription, ...] = ( + TotalConnectAlarmBinarySensorEntityDescription( + key=LOW_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda location: location.is_low_battery(), + ), + TotalConnectAlarmBinarySensorEntityDescription( + key=TAMPER, + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda location: location.is_cover_tampered(), + ), + TotalConnectAlarmBinarySensorEntityDescription( + key=POWER, + device_class=BinarySensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda location: location.is_ac_loss(), + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect device sensors based on a config entry.""" sensors: list = [] - client_locations = hass.data[DOMAIN][entry.entry_id].client.locations + coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + + client_locations = coordinator.client.locations for location_id, location in client_locations.items(): - sensors.append(TotalConnectAlarmLowBatteryBinarySensor(location)) - 
sensors.append(TotalConnectAlarmTamperBinarySensor(location)) - sensors.append(TotalConnectAlarmPowerBinarySensor(location)) + sensors.extend( + TotalConnectAlarmBinarySensor(coordinator, description, location) + for description in LOCATION_BINARY_SENSORS + ) for zone in location.zones.values(): - sensors.append(TotalConnectZoneSecurityBinarySensor(location_id, zone)) + sensors.append( + TotalConnectZoneBinarySensor( + coordinator, SECURITY_BINARY_SENSOR, zone, location_id + ) + ) if not zone.is_type_button(): - sensors.append(TotalConnectLowBatteryBinarySensor(location_id, zone)) - sensors.append(TotalConnectTamperBinarySensor(location_id, zone)) + sensors.extend( + TotalConnectZoneBinarySensor( + coordinator, + description, + zone, + location_id, + ) + for description in NO_BUTTON_BINARY_SENSORS + ) - async_add_entities(sensors, True) + async_add_entities(sensors) -class TotalConnectZoneBinarySensor(BinarySensorEntity): - """Represent an TotalConnect zone.""" +class TotalConnectZoneBinarySensor(TotalConnectZoneEntity, BinarySensorEntity): + """Represent a TotalConnect zone.""" - def __init__(self, location_id, zone): + entity_description: TotalConnectZoneBinarySensorEntityDescription + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + entity_description: TotalConnectZoneBinarySensorEntityDescription, + zone: TotalConnectZone, + location_id: str, + ) -> None: """Initialize the TotalConnect status.""" - self._location_id = location_id - self._zone = zone - self._attr_name = f"{zone.description}{self.entity_description.name}" - self._attr_unique_id = ( - f"{location_id}_{zone.zoneid}_{self.entity_description.key}" - ) - self._attr_is_on = None + super().__init__(coordinator, zone, location_id, entity_description.key) + self.entity_description = entity_description self._attr_extra_state_attributes = { - "zone_id": self._zone.zoneid, - "location_id": self._location_id, - "partition": self._zone.partition, + "zone_id": zone.zoneid, + 
"location_id": location_id, + "partition": zone.partition, } @property - def device_info(self) -> DeviceInfo: - """Return device info.""" - identifier = self._zone.sensor_serial_number or f"zone_{self._zone.zoneid}" - return DeviceInfo( - name=self._zone.description, - identifiers={(DOMAIN, identifier)}, - serial_number=self._zone.sensor_serial_number, - ) - - -class TotalConnectZoneSecurityBinarySensor(TotalConnectZoneBinarySensor): - """Represent an TotalConnect security zone.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=ZONE, name="" - ) + def is_on(self) -> bool: + """Return the state of the entity.""" + return self.entity_description.is_on_fn(self._zone) @property - def device_class(self): + def device_class(self) -> BinarySensorDeviceClass | None: """Return the class of this zone.""" - if self._zone.is_type_fire(): - return BinarySensorDeviceClass.SMOKE - if self._zone.is_type_carbon_monoxide(): - return BinarySensorDeviceClass.GAS - if self._zone.is_type_motion(): - return BinarySensorDeviceClass.MOTION - if self._zone.is_type_medical(): - return BinarySensorDeviceClass.SAFETY - if self._zone.is_type_temperature(): - return BinarySensorDeviceClass.PROBLEM - return BinarySensorDeviceClass.DOOR - - def update(self): - """Return the state of the device.""" - if self._zone.is_faulted() or self._zone.is_triggered(): - self._attr_is_on = True - else: - self._attr_is_on = False + if self.entity_description.device_class_fn: + return self.entity_description.device_class_fn(self._zone) + return super().device_class -class TotalConnectLowBatteryBinarySensor(TotalConnectZoneBinarySensor): - """Represent an TotalConnect zone low battery status.""" +class TotalConnectAlarmBinarySensor(TotalConnectLocationEntity, BinarySensorEntity): + """Represent a TotalConnect alarm device binary sensors.""" - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=LOW_BATTERY, - 
device_class=BinarySensorDeviceClass.BATTERY, - entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", - ) + entity_description: TotalConnectAlarmBinarySensorEntityDescription - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._zone.is_low_battery() - - -class TotalConnectTamperBinarySensor(TotalConnectZoneBinarySensor): - """Represent an TotalConnect zone tamper status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=TAMPER, - device_class=BinarySensorDeviceClass.TAMPER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._zone.is_tampered() - - -class TotalConnectAlarmBinarySensor(BinarySensorEntity): - """Represent an TotalConnect alarm device binary sensors.""" - - def __init__(self, location): + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + entity_description: TotalConnectAlarmBinarySensorEntityDescription, + location: TotalConnectLocation, + ) -> None: """Initialize the TotalConnect alarm device binary sensor.""" - self._location = location - self._attr_name = f"{location.location_name}{self.entity_description.name}" - self._attr_unique_id = f"{location.location_id}_{self.entity_description.key}" - self._attr_is_on = None + super().__init__(coordinator, location) + self.entity_description = entity_description + self._attr_unique_id = f"{location.location_id}_{entity_description.key}" self._attr_extra_state_attributes = { - "location_id": self._location.location_id, + "location_id": location.location_id, } - -class TotalConnectAlarmLowBatteryBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect Alarm low battery status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=LOW_BATTERY, - device_class=BinarySensorDeviceClass.BATTERY, - 
entity_category=EntityCategory.DIAGNOSTIC, - name=" low battery", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._location.is_low_battery() - - -class TotalConnectAlarmTamperBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect alarm tamper status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=TAMPER, - device_class=BinarySensorDeviceClass.TAMPER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {TAMPER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = self._location.is_cover_tampered() - - -class TotalConnectAlarmPowerBinarySensor(TotalConnectAlarmBinarySensor): - """Represent an TotalConnect alarm power status.""" - - entity_description: BinarySensorEntityDescription = BinarySensorEntityDescription( - key=POWER, - device_class=BinarySensorDeviceClass.POWER, - entity_category=EntityCategory.DIAGNOSTIC, - name=f" {POWER}", - ) - - def update(self): - """Return the state of the device.""" - self._attr_is_on = not self._location.is_ac_loss() + @property + def is_on(self) -> bool: + """Return the state of the entity.""" + return self.entity_description.is_on_fn(self._location) diff --git a/homeassistant/components/totalconnect/entity.py b/homeassistant/components/totalconnect/entity.py new file mode 100644 index 00000000000..a18ffc14df5 --- /dev/null +++ b/homeassistant/components/totalconnect/entity.py @@ -0,0 +1,57 @@ +"""Base class for TotalConnect entities.""" + +from total_connect_client.location import TotalConnectLocation +from total_connect_client.zone import TotalConnectZone + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import DOMAIN, TotalConnectDataUpdateCoordinator + + +class TotalConnectEntity(CoordinatorEntity[TotalConnectDataUpdateCoordinator]): + """Represent a TotalConnect entity.""" + + _attr_has_entity_name = True + + +class TotalConnectLocationEntity(TotalConnectEntity): + """Represent a TotalConnect location.""" + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + location: TotalConnectLocation, + ) -> None: + """Initialize the TotalConnect location.""" + super().__init__(coordinator) + self._location = location + self.device = device = location.devices[location.security_device_id] + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.serial_number)}, + name=device.name, + serial_number=device.serial_number, + ) + + +class TotalConnectZoneEntity(TotalConnectEntity): + """Represent a TotalConnect zone.""" + + def __init__( + self, + coordinator: TotalConnectDataUpdateCoordinator, + zone: TotalConnectZone, + location_id: str, + key: str, + ) -> None: + """Initialize the TotalConnect zone.""" + super().__init__(coordinator) + self._location_id = location_id + self._zone = zone + self._attr_unique_id = f"{location_id}_{zone.zoneid}_{key}" + identifier = zone.sensor_serial_number or f"zone_{zone.zoneid}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + name=zone.description, + serial_number=zone.sensor_serial_number, + ) diff --git a/homeassistant/components/totalconnect/manifest.json b/homeassistant/components/totalconnect/manifest.json index 183919f05f2..d1afb01210d 100644 --- a/homeassistant/components/totalconnect/manifest.json +++ b/homeassistant/components/totalconnect/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/totalconnect", "iot_class": "cloud_polling", "loggers": ["total_connect_client"], - "requirements": ["total-connect-client==2023.2"] + "requirements": ["total-connect-client==2023.12.1"] } diff --git 
a/homeassistant/components/totalconnect/strings.json b/homeassistant/components/totalconnect/strings.json index 922962c9866..03656b60084 100644 --- a/homeassistant/components/totalconnect/strings.json +++ b/homeassistant/components/totalconnect/strings.json @@ -49,5 +49,12 @@ "name": "Arm home instant", "description": "Arms Home with zero entry delay." } + }, + "entity": { + "alarm_control_panel": { + "partition": { + "name": "Partition {partition_id}" + } + } } } diff --git a/homeassistant/components/traccar_server/__init__.py b/homeassistant/components/traccar_server/__init__.py index 703df6cbfa4..c7a65d2d4a8 100644 --- a/homeassistant/components/traccar_server/__init__.py +++ b/homeassistant/components/traccar_server/__init__.py @@ -30,7 +30,11 @@ from .const import ( ) from .coordinator import TraccarServerCoordinator -PLATFORMS: list[Platform] = [Platform.DEVICE_TRACKER, Platform.SENSOR] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.DEVICE_TRACKER, + Platform.SENSOR, +] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/traccar_server/binary_sensor.py b/homeassistant/components/traccar_server/binary_sensor.py new file mode 100644 index 00000000000..6ee5757dcea --- /dev/null +++ b/homeassistant/components/traccar_server/binary_sensor.py @@ -0,0 +1,99 @@ +"""Support for Traccar server binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Generic, Literal, TypeVar, cast + +from pytraccar import DeviceModel + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import 
DOMAIN +from .coordinator import TraccarServerCoordinator +from .entity import TraccarServerEntity + +_T = TypeVar("_T") + + +@dataclass(frozen=True, kw_only=True) +class TraccarServerBinarySensorEntityDescription( + Generic[_T], BinarySensorEntityDescription +): + """Describe Traccar Server sensor entity.""" + + data_key: Literal["position", "device", "geofence", "attributes"] + entity_registry_enabled_default = False + entity_category = EntityCategory.DIAGNOSTIC + value_fn: Callable[[_T], bool | None] + + +TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS = ( + TraccarServerBinarySensorEntityDescription[DeviceModel]( + key="attributes.motion", + data_key="position", + translation_key="motion", + device_class=BinarySensorDeviceClass.MOTION, + value_fn=lambda x: x["attributes"].get("motion", False), + ), + TraccarServerBinarySensorEntityDescription[DeviceModel]( + key="status", + data_key="device", + translation_key="status", + value_fn=lambda x: None if (s := x["status"]) == "unknown" else s == "online", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensor entities.""" + coordinator: TraccarServerCoordinator = hass.data[DOMAIN][entry.entry_id] + async_add_entities( + TraccarServerBinarySensor( + coordinator=coordinator, + device=entry["device"], + description=cast(TraccarServerBinarySensorEntityDescription, description), + ) + for entry in coordinator.data.values() + for description in TRACCAR_SERVER_BINARY_SENSOR_ENTITY_DESCRIPTIONS + ) + + +class TraccarServerBinarySensor(TraccarServerEntity, BinarySensorEntity): + """Represent a traccar server binary sensor.""" + + _attr_has_entity_name = True + entity_description: TraccarServerBinarySensorEntityDescription + + def __init__( + self, + coordinator: TraccarServerCoordinator, + device: DeviceModel, + description: TraccarServerBinarySensorEntityDescription[_T], + ) -> None: + """Initialize the Traccar 
Server sensor.""" + super().__init__(coordinator, device) + self.entity_description = description + self._attr_unique_id = ( + f"{device['uniqueId']}_{description.data_key}_{description.key}" + ) + + @property + def is_on(self) -> bool | None: + """Return if the binary sensor is on or not.""" + return self.entity_description.value_fn( + getattr(self, f"traccar_{self.entity_description.data_key}") + ) diff --git a/homeassistant/components/traccar_server/device_tracker.py b/homeassistant/components/traccar_server/device_tracker.py index d15ba084dad..e7dba3ad99d 100644 --- a/homeassistant/components/traccar_server/device_tracker.py +++ b/homeassistant/components/traccar_server/device_tracker.py @@ -9,14 +9,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ( - ATTR_CATEGORY, - ATTR_MOTION, - ATTR_STATUS, - ATTR_TRACCAR_ID, - ATTR_TRACKER, - DOMAIN, -) +from .const import ATTR_CATEGORY, ATTR_TRACCAR_ID, ATTR_TRACKER, DOMAIN from .coordinator import TraccarServerCoordinator from .entity import TraccarServerEntity @@ -46,8 +39,6 @@ class TraccarServerDeviceTracker(TraccarServerEntity, TrackerEntity): return { **self.traccar_attributes, ATTR_CATEGORY: self.traccar_device["category"], - ATTR_MOTION: self.traccar_position["attributes"].get("motion", False), - ATTR_STATUS: self.traccar_device["status"], ATTR_TRACCAR_ID: self.traccar_device["id"], ATTR_TRACKER: DOMAIN, } diff --git a/homeassistant/components/traccar_server/diagnostics.py b/homeassistant/components/traccar_server/diagnostics.py index 80dc7a9c7cd..68f1e4fca8a 100644 --- a/homeassistant/components/traccar_server/diagnostics.py +++ b/homeassistant/components/traccar_server/diagnostics.py @@ -57,7 +57,7 @@ async def async_get_config_entry_diagnostics( "coordinator_data": coordinator.data, "entities": [ { - "enity_id": entity.entity_id, + "entity_id": entity.entity_id, 
"disabled": entity.disabled, "unit_of_measurement": entity.unit_of_measurement, "state": _entity_state(hass, entity, coordinator), @@ -92,7 +92,7 @@ async def async_get_device_diagnostics( "coordinator_data": coordinator.data, "entities": [ { - "enity_id": entity.entity_id, + "entity_id": entity.entity_id, "disabled": entity.disabled, "unit_of_measurement": entity.unit_of_measurement, "state": _entity_state(hass, entity, coordinator), diff --git a/homeassistant/components/traccar_server/icons.json b/homeassistant/components/traccar_server/icons.json index 59fc663e712..a10b154fbff 100644 --- a/homeassistant/components/traccar_server/icons.json +++ b/homeassistant/components/traccar_server/icons.json @@ -1,5 +1,14 @@ { "entity": { + "binary_sensor": { + "status": { + "default": "mdi:access-point-minus", + "state": { + "off": "mdi:access-point-off", + "on": "mdi:access-point" + } + } + }, "sensor": { "altitude": { "default": "mdi:altimeter" diff --git a/homeassistant/components/traccar_server/strings.json b/homeassistant/components/traccar_server/strings.json index 41adaace77e..8bec4b112ac 100644 --- a/homeassistant/components/traccar_server/strings.json +++ b/homeassistant/components/traccar_server/strings.json @@ -43,6 +43,22 @@ } }, "entity": { + "binary_sensor": { + "motion": { + "name": "Motion", + "state": { + "off": "Stopped", + "on": "Moving" + } + }, + "status": { + "name": "Status", + "state": { + "off": "Offline", + "on": "Online" + } + } + }, "sensor": { "address": { "name": "Address" diff --git a/homeassistant/components/unifi/__init__.py b/homeassistant/components/unifi/__init__.py index 5174a1a7796..69a6ec423ae 100644 --- a/homeassistant/components/unifi/__init__.py +++ b/homeassistant/components/unifi/__init__.py @@ -7,6 +7,7 @@ from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import 
config_validation as cv +from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.storage import Store from homeassistant.helpers.typing import ConfigType @@ -73,6 +74,18 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return await hub.async_reset() +async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry +) -> bool: + """Remove config entry from a device.""" + hub: UnifiHub = hass.data[UNIFI_DOMAIN][config_entry.entry_id] + return not any( + identifier + for _, identifier in device_entry.connections + if identifier in hub.api.clients or identifier in hub.api.devices + ) + + class UnifiWirelessClients: """Class to store clients known to be wireless. diff --git a/homeassistant/components/unifi/device_tracker.py b/homeassistant/components/unifi/device_tracker.py index a41d1942536..dc48b9c31fe 100644 --- a/homeassistant/components/unifi/device_tracker.py +++ b/homeassistant/components/unifi/device_tracker.py @@ -240,7 +240,7 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): self._ignore_events = False self._is_connected = description.is_connected_fn(self.hub, self._obj_id) if self.is_connected: - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + description.heartbeat_timedelta_fn(self.hub, self._obj_id), @@ -301,12 +301,12 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): # From unifi.entity.async_signal_reachable_callback # Controller connection state has changed and entity is unavailable # Cancel heartbeat - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) return if is_connected := description.is_connected_fn(self.hub, self._obj_id): self._is_connected = is_connected - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + description.heartbeat_timedelta_fn(self.hub, 
self._obj_id), @@ -319,12 +319,12 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): return if event.key in self._event_is_on: - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) self._is_connected = True self.async_write_ha_state() return - self.hub.async_heartbeat( + self.hub.update_heartbeat( self.unique_id, dt_util.utcnow() + self.entity_description.heartbeat_timedelta_fn(self.hub, self._obj_id), @@ -344,7 +344,7 @@ class UnifiScannerEntity(UnifiEntity[HandlerT, ApiItemT], ScannerEntity): async def async_will_remove_from_hass(self) -> None: """Disconnect object when removed.""" await super().async_will_remove_from_hass() - self.hub.async_heartbeat(self.unique_id) + self.hub.remove_heartbeat(self.unique_id) @property def extra_state_attributes(self) -> Mapping[str, Any] | None: diff --git a/homeassistant/components/unifi/hub/entity_helper.py b/homeassistant/components/unifi/hub/entity_helper.py new file mode 100644 index 00000000000..c4bcf237386 --- /dev/null +++ b/homeassistant/components/unifi/hub/entity_helper.py @@ -0,0 +1,156 @@ +"""UniFi Network entity helper.""" + +from __future__ import annotations + +from datetime import datetime, timedelta + +import aiounifi +from aiounifi.models.device import DeviceSetPoePortModeRequest + +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_call_later, async_track_time_interval +import homeassistant.util.dt as dt_util + + +class UnifiEntityHelper: + """UniFi Network integration handling platforms for entity registration.""" + + def __init__(self, hass: HomeAssistant, api: aiounifi.Controller) -> None: + """Initialize the UniFi entity loader.""" + self.hass = hass + self.api = api + + self._device_command = UnifiDeviceCommand(hass, api) + self._heartbeat = UnifiEntityHeartbeat(hass) + + @callback + def reset(self) -> None: + 
"""Cancel timers.""" + self._device_command.reset() + self._heartbeat.reset() + + @callback + def initialize(self) -> None: + """Initialize entity helper.""" + self._heartbeat.initialize() + + @property + def signal_heartbeat(self) -> str: + """Event to signal new heartbeat missed.""" + return self._heartbeat.signal + + @callback + def update_heartbeat(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat.update(unique_id, heartbeat_expire_time) + + @callback + def remove_heartbeat(self, unique_id: str) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat.remove(unique_id) + + @callback + def queue_poe_port_command( + self, device_id: str, port_idx: int, poe_mode: str + ) -> None: + """Queue commands to execute them together per device.""" + self._device_command.queue_poe_command(device_id, port_idx, poe_mode) + + +class UnifiEntityHeartbeat: + """UniFi entity heartbeat monitor.""" + + CHECK_HEARTBEAT_INTERVAL = timedelta(seconds=1) + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the heartbeat monitor.""" + self.hass = hass + + self._cancel_heartbeat_check: CALLBACK_TYPE | None = None + self._heartbeat_time: dict[str, datetime] = {} + + @callback + def reset(self) -> None: + """Cancel timers.""" + if self._cancel_heartbeat_check: + self._cancel_heartbeat_check() + self._cancel_heartbeat_check = None + + @callback + def initialize(self) -> None: + """Initialize heartbeat monitor.""" + self._cancel_heartbeat_check = async_track_time_interval( + self.hass, self._check_for_stale, self.CHECK_HEARTBEAT_INTERVAL + ) + + @property + def signal(self) -> str: + """Event to signal new heartbeat missed.""" + return "unifi-heartbeat-missed" + + @callback + def update(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._heartbeat_time[unique_id] = heartbeat_expire_time + + @callback + def 
remove(self, unique_id: str) -> None: + """Remove device from heartbeat monitor.""" + self._heartbeat_time.pop(unique_id, None) + + @callback + def _check_for_stale(self, *_: datetime) -> None: + """Check for any devices scheduled to be marked disconnected.""" + now = dt_util.utcnow() + + unique_ids_to_remove = [] + for unique_id, heartbeat_expire_time in self._heartbeat_time.items(): + if now > heartbeat_expire_time: + async_dispatcher_send(self.hass, f"{self.signal}_{unique_id}") + unique_ids_to_remove.append(unique_id) + + for unique_id in unique_ids_to_remove: + del self._heartbeat_time[unique_id] + + +class UnifiDeviceCommand: + """UniFi Device command helper class.""" + + COMMAND_DELAY = 5 + + def __init__(self, hass: HomeAssistant, api: aiounifi.Controller) -> None: + """Initialize device command helper.""" + self.hass = hass + self.api = api + + self._command_queue: dict[str, dict[int, str]] = {} + self._cancel_command: CALLBACK_TYPE | None = None + + @callback + def reset(self) -> None: + """Cancel timers.""" + if self._cancel_command: + self._cancel_command() + self._cancel_command = None + + @callback + def queue_poe_command(self, device_id: str, port_idx: int, poe_mode: str) -> None: + """Queue commands to execute them together per device.""" + self.reset() + + device_queue = self._command_queue.setdefault(device_id, {}) + device_queue[port_idx] = poe_mode + + async def _command(now: datetime) -> None: + """Execute previously queued commands.""" + queue = self._command_queue.copy() + self._command_queue.clear() + for device_id, device_commands in queue.items(): + device = self.api.devices[device_id] + commands = list(device_commands.items()) + await self.api.request( + DeviceSetPoePortModeRequest.create(device, targets=commands) + ) + + self._cancel_command = async_call_later(self.hass, self.COMMAND_DELAY, _command) diff --git a/homeassistant/components/unifi/hub/hub.py b/homeassistant/components/unifi/hub/hub.py index df91584f267..f8c1f2517a2 100644 
--- a/homeassistant/components/unifi/hub/hub.py +++ b/homeassistant/components/unifi/hub/hub.py @@ -2,13 +2,12 @@ from __future__ import annotations -from datetime import datetime, timedelta +from datetime import datetime import aiounifi -from aiounifi.models.device import DeviceSetPoePortModeRequest from homeassistant.config_entries import ConfigEntry -from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import ( DeviceEntry, @@ -16,16 +15,13 @@ from homeassistant.helpers.device_registry import ( DeviceInfo, ) from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_call_later, async_track_time_interval -import homeassistant.util.dt as dt_util from ..const import ATTR_MANUFACTURER, CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN, PLATFORMS from .config import UnifiConfig +from .entity_helper import UnifiEntityHelper from .entity_loader import UnifiEntityLoader from .websocket import UnifiWebsocket -CHECK_HEARTBEAT_INTERVAL = timedelta(seconds=1) - class UnifiHub: """Manages a single UniFi Network instance.""" @@ -38,17 +34,12 @@ class UnifiHub: self.api = api self.config = UnifiConfig.from_config_entry(config_entry) self.entity_loader = UnifiEntityLoader(self) + self._entity_helper = UnifiEntityHelper(hass, api) self.websocket = UnifiWebsocket(hass, api, self.signal_reachable) self.site = config_entry.data[CONF_SITE_ID] self.is_admin = False - self._cancel_heartbeat_check: CALLBACK_TYPE | None = None - self._heartbeat_time: dict[str, datetime] = {} - - self.poe_command_queue: dict[str, dict[int, str]] = {} - self._cancel_poe_command: CALLBACK_TYPE | None = None - @callback @staticmethod def get_hub(hass: HomeAssistant, config_entry: ConfigEntry) -> UnifiHub: @@ -61,6 +52,28 @@ class UnifiHub: """Websocket connection state.""" return 
self.websocket.available + @property + def signal_heartbeat_missed(self) -> str: + """Event to signal new heartbeat missed.""" + return self._entity_helper.signal_heartbeat + + @callback + def update_heartbeat(self, unique_id: str, heartbeat_expire_time: datetime) -> None: + """Update device time in heartbeat monitor.""" + self._entity_helper.update_heartbeat(unique_id, heartbeat_expire_time) + + @callback + def remove_heartbeat(self, unique_id: str) -> None: + """Update device time in heartbeat monitor.""" + self._entity_helper.remove_heartbeat(unique_id) + + @callback + def queue_poe_port_command( + self, device_id: str, port_idx: int, poe_mode: str + ) -> None: + """Queue commands to execute them together per device.""" + self._entity_helper.queue_poe_port_command(device_id, port_idx, poe_mode) + @property def signal_reachable(self) -> str: """Integration specific event to signal a change in connection status.""" @@ -71,77 +84,16 @@ class UnifiHub: """Event specific per UniFi entry to signal new options.""" return f"unifi-options-{self.config.entry.entry_id}" - @property - def signal_heartbeat_missed(self) -> str: - """Event specific per UniFi device tracker to signal new heartbeat missed.""" - return "unifi-heartbeat-missed" - async def initialize(self) -> None: """Set up a UniFi Network instance.""" await self.entity_loader.initialize() + self._entity_helper.initialize() assert self.config.entry.unique_id is not None self.is_admin = self.api.sites[self.config.entry.unique_id].role == "admin" self.config.entry.add_update_listener(self.async_config_entry_updated) - self._cancel_heartbeat_check = async_track_time_interval( - self.hass, self._async_check_for_stale, CHECK_HEARTBEAT_INTERVAL - ) - - @callback - def async_heartbeat( - self, unique_id: str, heartbeat_expire_time: datetime | None = None - ) -> None: - """Signal when a device has fresh home state.""" - if heartbeat_expire_time is not None: - self._heartbeat_time[unique_id] = heartbeat_expire_time - 
return - - if unique_id in self._heartbeat_time: - del self._heartbeat_time[unique_id] - - @callback - def _async_check_for_stale(self, *_: datetime) -> None: - """Check for any devices scheduled to be marked disconnected.""" - now = dt_util.utcnow() - - unique_ids_to_remove = [] - for unique_id, heartbeat_expire_time in self._heartbeat_time.items(): - if now > heartbeat_expire_time: - async_dispatcher_send( - self.hass, f"{self.signal_heartbeat_missed}_{unique_id}" - ) - unique_ids_to_remove.append(unique_id) - - for unique_id in unique_ids_to_remove: - del self._heartbeat_time[unique_id] - - @callback - def async_queue_poe_port_command( - self, device_id: str, port_idx: int, poe_mode: str - ) -> None: - """Queue commands to execute them together per device.""" - if self._cancel_poe_command: - self._cancel_poe_command() - self._cancel_poe_command = None - - device_queue = self.poe_command_queue.setdefault(device_id, {}) - device_queue[port_idx] = poe_mode - - async def async_execute_command(now: datetime) -> None: - """Execute previously queued commands.""" - queue = self.poe_command_queue.copy() - self.poe_command_queue.clear() - for device_id, device_commands in queue.items(): - device = self.api.devices[device_id] - commands = list(device_commands.items()) - await self.api.request( - DeviceSetPoePortModeRequest.create(device, targets=commands) - ) - - self._cancel_poe_command = async_call_later(self.hass, 5, async_execute_command) - @property def device_info(self) -> DeviceInfo: """UniFi Network device info.""" @@ -205,12 +157,6 @@ class UnifiHub: if not unload_ok: return False - if self._cancel_heartbeat_check: - self._cancel_heartbeat_check() - self._cancel_heartbeat_check = None - - if self._cancel_poe_command: - self._cancel_poe_command() - self._cancel_poe_command = None + self._entity_helper.reset() return True diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 05dc2189908..982d654c8fe 100644 --- 
a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==74"], + "requirements": ["aiounifi==76"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 360f40384c9..17b3cae93fd 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -239,6 +239,42 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( unique_id_fn=lambda hub, obj_id: f"poe_power-{obj_id}", value_fn=lambda _, obj: obj.poe_power if obj.poe_mode != "off" else "0", ), + UnifiSensorEntityDescription[Ports, Port]( + key="Port Bandwidth sensor RX", + device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, + suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, + icon="mdi:download", + allowed_fn=lambda hub, _: hub.config.option_allow_bandwidth_sensors, + api_handler_fn=lambda api: api.ports, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda port: f"{port.name} RX", + object_fn=lambda api, obj_id: api.ports[obj_id], + unique_id_fn=lambda hub, obj_id: f"port_rx-{obj_id}", + value_fn=lambda hub, port: port.rx_bytes_r, + ), + UnifiSensorEntityDescription[Ports, Port]( + key="Port Bandwidth sensor TX", + device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfDataRate.BYTES_PER_SECOND, + suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND, + icon="mdi:upload", + allowed_fn=lambda hub, 
_: hub.config.option_allow_bandwidth_sensors, + api_handler_fn=lambda api: api.ports, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda port: f"{port.name} TX", + object_fn=lambda api, obj_id: api.ports[obj_id], + unique_id_fn=lambda hub, obj_id: f"port_tx-{obj_id}", + value_fn=lambda hub, port: port.tx_bytes_r, + ), UnifiSensorEntityDescription[Clients, Client]( key="Client uptime", device_class=SensorDeviceClass.TIMESTAMP, @@ -350,19 +386,6 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( value_fn=async_device_state_value_fn, options=list(DEVICE_STATES.values()), ), - UnifiSensorEntityDescription[Wlans, Wlan]( - key="WLAN password", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - api_handler_fn=lambda api: api.wlans, - available_fn=async_wlan_available_fn, - device_info_fn=async_wlan_device_info_fn, - name_fn=lambda wlan: "Password", - object_fn=lambda api, obj_id: api.wlans[obj_id], - supported_fn=lambda hub, obj_id: hub.api.wlans[obj_id].x_passphrase is not None, - unique_id_fn=lambda hub, obj_id: f"password-{obj_id}", - value_fn=lambda hub, obj: obj.x_passphrase, - ), UnifiSensorEntityDescription[Devices, Device]( key="Device CPU utilization", entity_category=EntityCategory.DIAGNOSTIC, @@ -437,7 +460,7 @@ class UnifiSensorEntity(UnifiEntity[HandlerT, ApiItemT], SensorEntity): if description.is_connected_fn is not None: # Send heartbeat if client is connected if description.is_connected_fn(self.hub, self._obj_id): - self.hub.async_heartbeat( + self.hub.update_heartbeat( self._attr_unique_id, dt_util.utcnow() + self.hub.config.option_detection_time, ) @@ -462,4 +485,4 @@ class UnifiSensorEntity(UnifiEntity[HandlerT, ApiItemT], SensorEntity): if self.entity_description.is_connected_fn is not None: # Remove heartbeat registration - self.hub.async_heartbeat(self._attr_unique_id) + self.hub.remove_heartbeat(self._attr_unique_id) diff --git 
a/homeassistant/components/unifi/switch.py b/homeassistant/components/unifi/switch.py index 6e073a655a5..45357dd67d2 100644 --- a/homeassistant/components/unifi/switch.py +++ b/homeassistant/components/unifi/switch.py @@ -147,7 +147,7 @@ async def async_poe_port_control_fn(hub: UnifiHub, obj_id: str, target: bool) -> port = hub.api.ports[obj_id] on_state = "auto" if port.raw["poe_caps"] != 8 else "passthrough" state = on_state if target else "off" - hub.async_queue_poe_port_command(mac, int(index), state) + hub.queue_poe_port_command(mac, int(index), state) async def async_port_forward_control_fn( diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index ea03c4b15f1..479b7f02024 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -145,7 +145,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Handle a clear cache service call.""" # clear the cache with suppress(FileNotFoundError): - if CONF_ADDRESS in call.data and call.data[CONF_ADDRESS]: + if call.data.get(CONF_ADDRESS): await hass.async_add_executor_job( os.unlink, hass.config.path( diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index f37de104659..823d682d339 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -34,6 +34,7 @@ class VelbusCover(VelbusEntity, CoverEntity): """Representation a Velbus cover.""" _channel: VelbusBlind + _assumed_closed: bool def __init__(self, channel: VelbusBlind) -> None: """Initialize the cover.""" @@ -51,11 +52,16 @@ class VelbusCover(VelbusEntity, CoverEntity): | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP ) + self._attr_assumed_state = True + # guess the state to get the open/closed icons somewhat working + self._assumed_closed = False @property def is_closed(self) -> bool | None: """Return if the cover is closed.""" - return 
self._channel.is_closed() + if self._channel.support_position(): + return self._channel.is_closed() + return self._assumed_closed @property def is_opening(self) -> bool: @@ -83,11 +89,13 @@ class VelbusCover(VelbusEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() + self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() + self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index 227356a2525..da2bc2ced2b 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -112,7 +112,7 @@ class VerisureDoorlock(CoordinatorEntity[VerisureDataUpdateCoordinator], LockEnt digits = self.coordinator.entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) - return "^\\d{%s}$" % digits + return f"^\\d{{{digits}}}$" @property def is_locked(self) -> bool: diff --git a/homeassistant/components/vizio/const.py b/homeassistant/components/vizio/const.py index 12de3af1cb0..03caa723771 100644 --- a/homeassistant/components/vizio/const.py +++ b/homeassistant/components/vizio/const.py @@ -52,7 +52,9 @@ DEVICE_ID = "pyvizio" DOMAIN = "vizio" COMMON_SUPPORTED_COMMANDS = ( - MediaPlayerEntityFeature.SELECT_SOURCE + MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/vizio/media_player.py b/homeassistant/components/vizio/media_player.py index c19c091bb3d..18af2c0dbb2 100644 --- a/homeassistant/components/vizio/media_player.py +++ b/homeassistant/components/vizio/media_player.py @@ -159,6 +159,7 @@ class 
VizioDevice(MediaPlayerEntity): ) self._device = device self._max_volume = float(device.get_max_volume()) + self._attr_assumed_state = True # Entity class attributes that will change with each update (we only include # the ones that are initialized differently from the defaults) @@ -483,3 +484,11 @@ class VizioDevice(MediaPlayerEntity): num = int(self._max_volume * (self._attr_volume_level - volume)) await self._device.vol_down(num=num, log_api_exception=False) self._attr_volume_level = volume + + async def async_media_play(self) -> None: + """Play whatever media is currently active.""" + await self._device.play(log_api_exception=False) + + async def async_media_pause(self) -> None: + """Pause whatever media is currently active.""" + await self._device.pause(log_api_exception=False) diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index ced871b7616..7e2e974e709 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -4,7 +4,9 @@ "codeowners": ["@paoloantinori", "@chemelli74"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vodafone_station", + "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], + "quality_scale": "silver", "requirements": ["aiovodafone==0.5.4"] } diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 937c0220cbf..2a08a9b2ebe 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -107,12 +107,12 @@ SENSOR_TYPES: Final = ( VodafoneStationEntityDescription( key="phone_num1", translation_key="phone_num1", - is_suitable=lambda info: info["phone_unavailable1"] == "0", + is_suitable=lambda info: info["phone_num1"] != "", ), VodafoneStationEntityDescription( key="phone_num2", 
translation_key="phone_num2", - is_suitable=lambda info: info["phone_unavailable2"] == "0", + is_suitable=lambda info: info["phone_num2"] != "", ), VodafoneStationEntityDescription( key="sys_uptime", diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index a0b54fd8db0..e5c3a055310 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -129,7 +129,7 @@ class WolSwitch(SwitchEntity): if self._attr_assumed_state: self._state = True - self.async_write_ha_state() + self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn the device off if an off action is present.""" @@ -138,7 +138,7 @@ class WolSwitch(SwitchEntity): if self._attr_assumed_state: self._state = False - self.async_write_ha_state() + self.schedule_update_ha_state() def update(self) -> None: """Check if device is on and update the state. Only called if assumed state is false.""" diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index 4c905a8451e..e8972c320ed 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -50,6 +50,7 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): existing_entry, data={CONF_API_TOKEN: api_token}, reason="reauth_successful", + reload_even_if_entry_is_unchanged=False, ) return self.async_show_form( diff --git a/homeassistant/components/withings/__init__.py b/homeassistant/components/withings/__init__.py index 1fe85f180da..0b86a2b5201 100644 --- a/homeassistant/components/withings/__init__.py +++ b/homeassistant/components/withings/__init__.py @@ -12,6 +12,7 @@ from dataclasses import dataclass, field from datetime import timedelta from typing import TYPE_CHECKING, Any, cast +from aiohttp import ClientError from aiohttp.hdrs import METH_POST from aiohttp.web import 
Request, Response from aiowithings import NotificationCategory, WithingsClient @@ -274,7 +275,11 @@ class WithingsWebhookManager: async def async_unsubscribe_webhooks(client: WithingsClient) -> None: """Unsubscribe to all Withings webhooks.""" - current_webhooks = await client.list_notification_configurations() + try: + current_webhooks = await client.list_notification_configurations() + except ClientError: + LOGGER.exception("Error when unsubscribing webhooks") + return for webhook_configuration in current_webhooks: LOGGER.debug( diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 6b51c0fb2cb..88dcce39993 100644 --- a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -1,10 +1,10 @@ { "domain": "wolflink", "name": "Wolf SmartSet Service", - "codeowners": ["@adamkrol93"], + "codeowners": ["@adamkrol93", "@mtielen"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.6"] + "requirements": ["wolf-comm==0.0.7"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 314f4c6bcf4..e0813cd90cd 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.46"] + "requirements": ["holidays==0.47"] } diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index cd3b3192520..5baaf614b01 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -138,8 +138,8 @@ async def async_setup_platform( message = await hass.async_add_executor_job(device.read, slot) _LOGGER.debug("Message 
received from device: '%s'", message) - if "code" in message and message["code"]: - log_msg = "Received command is: {}".format(message["code"]) + if code := message.get("code"): + log_msg = f"Received command is: {code}" _LOGGER.info(log_msg) persistent_notification.async_create( hass, log_msg, title="Xiaomi Miio Remote" diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index cd6759b5864..b7bd1d4784f 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.2"] + "requirements": ["yolink-api==0.4.3"] } diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 7741673557d..452f11db85b 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,12 +21,12 @@ "universal_silabs_flasher" ], "requirements": [ - "bellows==0.38.1", + "bellows==0.38.2", "pyserial==3.5", "pyserial-asyncio==0.6", - "zha-quirks==0.0.114", + "zha-quirks==0.0.115", "zigpy-deconz==0.23.1", - "zigpy==0.63.5", + "zigpy==0.64.0", "zigpy-xbee==0.20.1", "zigpy-zigate==0.12.0", "zigpy-znp==0.12.1", diff --git a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py index 5b1f85e1a29..4ee10c7bb93 100644 --- a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py +++ b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py @@ -74,9 +74,14 @@ def _detect_radio_hardware(hass: HomeAssistant, device: str) -> HardwareType: return HardwareType.OTHER -async def probe_silabs_firmware_type(device: str) -> ApplicationType | None: +async def probe_silabs_firmware_type( + device: str, *, probe_methods: ApplicationType | None = 
None +) -> ApplicationType | None: """Probe the running firmware on a Silabs device.""" - flasher = Flasher(device=device) + flasher = Flasher( + device=device, + **({"probe_methods": probe_methods} if probe_methods else {}), + ) try: await flasher.probe_app_type() diff --git a/homeassistant/components/zwave_js/siren.py b/homeassistant/components/zwave_js/siren.py index b3f54ae9904..413186da9bf 100644 --- a/homeassistant/components/zwave_js/siren.py +++ b/homeassistant/components/zwave_js/siren.py @@ -63,7 +63,8 @@ class ZwaveSirenEntity(ZWaveBaseEntity, SirenEntity): super().__init__(config_entry, driver, info) # Entity class attributes self._attr_available_tones = { - int(id): val for id, val in self.info.primary_value.metadata.states.items() + int(state_id): val + for state_id, val in self.info.primary_value.metadata.states.items() } self._attr_supported_features = ( SirenEntityFeature.TURN_ON diff --git a/homeassistant/config.py b/homeassistant/config.py index 61b346944fa..abb29f6a1a1 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -39,6 +39,7 @@ from .const import ( CONF_CUSTOMIZE, CONF_CUSTOMIZE_DOMAIN, CONF_CUSTOMIZE_GLOB, + CONF_DEBUG, CONF_ELEVATION, CONF_EXTERNAL_URL, CONF_ID, @@ -391,6 +392,7 @@ CORE_CONFIG_SCHEMA = vol.All( vol.Optional(CONF_CURRENCY): _validate_currency, vol.Optional(CONF_COUNTRY): cv.country, vol.Optional(CONF_LANGUAGE): cv.language, + vol.Optional(CONF_DEBUG): cv.boolean, } ), _filter_bad_internal_external_urls, @@ -899,6 +901,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non if key in config: setattr(hac, attr, config[key]) + if config.get(CONF_DEBUG): + hac.debug = True + _raise_issue_if_legacy_templates(hass, config.get(CONF_LEGACY_TEMPLATES)) _raise_issue_if_historic_currency(hass, hass.config.currency) _raise_issue_if_no_country(hass, hass.config.country) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index bf576b517d3..056814bbc4d 100644 
--- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -1405,7 +1405,9 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): @callback def _async_discovery(self) -> None: """Handle discovery.""" - self.hass.bus.async_fire(EVENT_FLOW_DISCOVERED) + # async_fire_internal is used here because this is only + # called from the Debouncer so we know the usage is safe + self.hass.bus.async_fire_internal(EVENT_FLOW_DISCOVERED) persistent_notification.async_create( self.hass, title="New devices discovered", @@ -2397,6 +2399,7 @@ class ConfigFlow(ConfigEntryBaseFlow): data: Mapping[str, Any] | UndefinedType = UNDEFINED, options: Mapping[str, Any] | UndefinedType = UNDEFINED, reason: str = "reauth_successful", + reload_even_if_entry_is_unchanged: bool = True, ) -> ConfigFlowResult: """Update config entry, reload config entry and finish config flow.""" result = self.hass.config_entries.async_update_entry( @@ -2406,7 +2409,7 @@ class ConfigFlow(ConfigEntryBaseFlow): data=data, options=options, ) - if result: + if reload_even_if_entry_is_unchanged or result: self.hass.config_entries.async_schedule_reload(entry.entry_id) return self.async_abort(reason=reason) diff --git a/homeassistant/const.py b/homeassistant/const.py index 58a1c92ea72..45ff6ecf976 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -22,7 +22,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 5 +MINOR_VERSION: Final = 6 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" @@ -296,6 +296,7 @@ CONF_WHILE: Final = "while" CONF_WHITELIST: Final = "whitelist" CONF_ALLOWLIST_EXTERNAL_DIRS: Final = "allowlist_external_dirs" LEGACY_CONF_WHITELIST_EXTERNAL_DIRS: Final = "whitelist_external_dirs" +CONF_DEBUG: Final = "debug" CONF_XY: Final = "xy" CONF_ZONE: Final = "zone" diff --git 
a/homeassistant/core.py b/homeassistant/core.py index 01536f8ffdb..a3150adc221 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -36,7 +36,6 @@ from typing import ( TYPE_CHECKING, Any, Generic, - Literal, NotRequired, ParamSpec, Self, @@ -279,17 +278,24 @@ def async_get_hass() -> HomeAssistant: return _hass.hass +class ReleaseChannel(enum.StrEnum): + BETA = "beta" + DEV = "dev" + NIGHTLY = "nightly" + STABLE = "stable" + + @callback -def get_release_channel() -> Literal["beta", "dev", "nightly", "stable"]: +def get_release_channel() -> ReleaseChannel: """Find release channel based on version number.""" version = __version__ if "dev0" in version: - return "dev" + return ReleaseChannel.DEV if "dev" in version: - return "nightly" + return ReleaseChannel.NIGHTLY if "b" in version: - return "beta" - return "stable" + return ReleaseChannel.BETA + return ReleaseChannel.STABLE @enum.unique @@ -423,6 +429,20 @@ class HomeAssistant: max_workers=1, thread_name_prefix="ImportExecutor" ) + def verify_event_loop_thread(self, what: str) -> None: + """Report and raise if we are not running in the event loop thread.""" + if ( + loop_thread_ident := self.loop.__dict__.get("_thread_ident") + ) and loop_thread_ident != threading.get_ident(): + from .helpers import frame # pylint: disable=import-outside-toplevel + + # frame is a circular import, so we import it here + frame.report( + f"calls {what} from a thread", + error_if_core=True, + error_if_integration=True, + ) + @property def _active_tasks(self) -> set[asyncio.Future[Any]]: """Return all active tasks. @@ -497,11 +517,10 @@ class HomeAssistant: This method is a coroutine. 
""" _LOGGER.info("Starting Home Assistant") - setattr(self.loop, "_thread_ident", threading.get_ident()) self.set_state(CoreState.starting) - self.bus.async_fire(EVENT_CORE_CONFIG_UPDATE) - self.bus.async_fire(EVENT_HOMEASSISTANT_START) + self.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_START) if not self._tasks: pending: set[asyncio.Future[Any]] | None = None @@ -534,8 +553,8 @@ class HomeAssistant: return self.set_state(CoreState.running) - self.bus.async_fire(EVENT_CORE_CONFIG_UPDATE) - self.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + self.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_STARTED) def add_job( self, target: Callable[[*_Ts], Any] | Coroutine[Any, Any, Any], *args: *_Ts @@ -1109,7 +1128,7 @@ class HomeAssistant: self.exit_code = exit_code self.set_state(CoreState.stopping) - self.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_STOP) try: async with self.timeout.async_timeout(STOP_STAGE_SHUTDOWN_TIMEOUT): await self.async_block_till_done() @@ -1122,7 +1141,7 @@ class HomeAssistant: # Stage 3 - Final write self.set_state(CoreState.final_write) - self.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_FINAL_WRITE) try: async with self.timeout.async_timeout(FINAL_WRITE_STAGE_SHUTDOWN_TIMEOUT): await self.async_block_till_done() @@ -1135,7 +1154,7 @@ class HomeAssistant: # Stage 4 - Close self.set_state(CoreState.not_running) - self.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) + self.bus.async_fire_internal(EVENT_HOMEASSISTANT_CLOSE) # Make a copy of running_tasks since a task can finish # while we are awaiting canceled tasks to get their result @@ -1384,10 +1403,16 @@ class _OneTimeListener(Generic[_DataT]): return f"<_OneTimeListener {self.listener_job.target}>" -# Empty list, used by EventBus._async_fire +# Empty list, used by 
EventBus.async_fire_internal EMPTY_LIST: list[Any] = [] +def _verify_event_type_length_or_raise(event_type: EventType[_DataT] | str) -> None: + """Verify the length of the event type and raise if too long.""" + if len(event_type) > MAX_LENGTH_EVENT_EVENT_TYPE: + raise MaxLengthExceeded(event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE) + + class EventBus: """Allow the firing of and listening for events.""" @@ -1428,8 +1453,9 @@ class EventBus: context: Context | None = None, ) -> None: """Fire an event.""" + _verify_event_type_length_or_raise(event_type) self._hass.loop.call_soon_threadsafe( - self.async_fire, event_type, event_data, origin, context + self.async_fire_internal, event_type, event_data, origin, context ) @callback @@ -1445,14 +1471,14 @@ class EventBus: This method must be run in the event loop. """ - if len(event_type) > MAX_LENGTH_EVENT_EVENT_TYPE: - raise MaxLengthExceeded( - event_type, "event_type", MAX_LENGTH_EVENT_EVENT_TYPE - ) - return self._async_fire(event_type, event_data, origin, context, time_fired) + _verify_event_type_length_or_raise(event_type) + self._hass.verify_event_loop_thread("async_fire") + return self.async_fire_internal( + event_type, event_data, origin, context, time_fired + ) @callback - def _async_fire( + def async_fire_internal( self, event_type: EventType[_DataT] | str, event_data: _DataT | None = None, @@ -1460,7 +1486,12 @@ class EventBus: context: Context | None = None, time_fired: float | None = None, ) -> None: - """Fire an event. + """Fire an event, for internal use only. + + This method is intended to only be used by core internally + and should not be considered a stable API. We will make + breaking change to this function in the future and it + should not be used in integrations. This method must be run in the event loop. 
""" @@ -2106,7 +2137,7 @@ class StateMachine: "old_state": old_state, "new_state": None, } - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_CHANGED, state_changed_data, context=context, @@ -2219,7 +2250,7 @@ class StateMachine: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] old_state.last_reported = now # type: ignore[union-attr] - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_REPORTED, { "entity_id": entity_id, @@ -2262,7 +2293,7 @@ class StateMachine: "old_state": old_state, "new_state": state, } - self._bus._async_fire( # pylint: disable=protected-access + self._bus.async_fire_internal( EVENT_STATE_CHANGED, state_changed_data, context=context, @@ -2425,7 +2456,7 @@ class ServiceRegistry: """ run_callback_threadsafe( self._hass.loop, - self.async_register, + self._async_register, domain, service, service_func, @@ -2453,6 +2484,33 @@ class ServiceRegistry: Schema is called to coerce and validate the service data. + This method must be run in the event loop. + """ + self._hass.verify_event_loop_thread("async_register") + self._async_register( + domain, service, service_func, schema, supports_response, job_type + ) + + @callback + def _async_register( + self, + domain: str, + service: str, + service_func: Callable[ + [ServiceCall], + Coroutine[Any, Any, ServiceResponse | EntityServiceResponse] + | ServiceResponse + | EntityServiceResponse + | None, + ], + schema: vol.Schema | None = None, + supports_response: SupportsResponse = SupportsResponse.NONE, + job_type: HassJobType | None = None, + ) -> None: + """Register a service. + + Schema is called to coerce and validate the service data. + This method must be run in the event loop. 
""" domain = domain.lower() @@ -2471,20 +2529,29 @@ class ServiceRegistry: else: self._services[domain] = {service: service_obj} - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( EVENT_SERVICE_REGISTERED, {ATTR_DOMAIN: domain, ATTR_SERVICE: service} ) def remove(self, domain: str, service: str) -> None: """Remove a registered service from service handler.""" run_callback_threadsafe( - self._hass.loop, self.async_remove, domain, service + self._hass.loop, self._async_remove, domain, service ).result() @callback def async_remove(self, domain: str, service: str) -> None: """Remove a registered service from service handler. + This method must be run in the event loop. + """ + self._hass.verify_event_loop_thread("async_remove") + self._async_remove(domain, service) + + @callback + def _async_remove(self, domain: str, service: str) -> None: + """Remove a registered service from service handler. + This method must be run in the event loop. """ domain = domain.lower() @@ -2499,7 +2566,7 @@ class ServiceRegistry: if not self._services[domain]: self._services.pop(domain) - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( EVENT_SERVICE_REMOVED, {ATTR_DOMAIN: domain, ATTR_SERVICE: service} ) @@ -2583,7 +2650,7 @@ class ServiceRegistry: if handler.supports_response is SupportsResponse.NONE: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key="service_does_not_supports_reponse", + translation_key="service_does_not_support_response", translation_placeholders={ "return_response": "return_response=True" }, @@ -2616,7 +2683,7 @@ class ServiceRegistry: domain, service, processed_data, context, return_response ) - self._hass.bus._async_fire( # pylint: disable=protected-access + self._hass.bus.async_fire_internal( EVENT_CALL_SERVICE, { ATTR_DOMAIN: domain, @@ -2736,6 +2803,7 @@ class Config: self.elevation: int = 0 """Elevation (always in meters regardless of the unit system).""" + self.debug: bool = False self.location_name: str 
= "Home" self.time_zone: str = "UTC" self.units: UnitSystem = METRIC_SYSTEM @@ -2876,6 +2944,7 @@ class Config: "country": self.country, "language": self.language, "safe_mode": self.safe_mode, + "debug": self.debug, } def set_time_zone(self, time_zone_str: str) -> None: @@ -2942,7 +3011,7 @@ class Config: self._update(source=ConfigSource.STORAGE, **kwargs) await self._async_store() - self.hass.bus.async_fire(EVENT_CORE_CONFIG_UPDATE, kwargs) + self.hass.bus.async_fire_internal(EVENT_CORE_CONFIG_UPDATE, kwargs) _raise_issue_if_historic_currency(self.hass, self.currency) _raise_issue_if_no_country(self.hass, self.country) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 7e7019681af..f628879a7fd 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -442,7 +442,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ) ): # Tell frontend to reload the flow state. - self.hass.bus.async_fire( + self.hass.bus.async_fire_internal( EVENT_DATA_ENTRY_FLOW_PROGRESSED, {"handler": flow.handler, "flow_id": flow_id, "refresh": True}, ) diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 6740c39b016..6d59a731879 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -152,6 +152,7 @@ FLOWS = { "enocean", "enphase_envoy", "environment_canada", + "epic_games_store", "epion", "epson", "eq3btsmart", @@ -174,6 +175,7 @@ FLOWS = { "flo", "flume", "flux_led", + "folder_watcher", "forecast_solar", "forked_daapd", "foscam", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 77246604df9..cd0e449bd09 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1649,6 +1649,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "epic_games_store": { + "name": "Epic Games Store", + "integration_type": "service", + 
"config_flow": true, + "iot_class": "cloud_polling" + }, "epion": { "name": "Epion", "integration_type": "hub", @@ -1950,7 +1956,7 @@ "folder_watcher": { "name": "Folder Watcher", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "foobot": { @@ -2559,6 +2565,11 @@ "integration_type": "virtual", "supported_by": "netatmo" }, + "homeassistant_sky_connect": { + "name": "Home Assistant SkyConnect", + "integration_type": "device", + "config_flow": true + }, "homematic": { "name": "Homematic", "integrations": { diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 38287eb6722..bf20a2d7f5f 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1106,7 +1106,7 @@ def empty_config_schema(domain: str) -> Callable[[dict], dict]: """Return a config schema which logs if there are configuration parameters.""" def validator(config: dict) -> dict: - if domain in config and config[domain]: + if config_domain := config.get(domain): get_integration_logger(__name__).error( ( "The %s integration does not support any configuration parameters, " @@ -1114,7 +1114,7 @@ def empty_config_schema(domain: str) -> Callable[[dict], dict]: "configuration." 
), domain, - config[domain], + config_domain, ) return config diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 3a9d047810b..aec5dbc6c4a 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -13,7 +13,13 @@ import attr from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP -from homeassistant.core import Event, HomeAssistant, callback, get_release_channel +from homeassistant.core import ( + Event, + HomeAssistant, + ReleaseChannel, + callback, + get_release_channel, +) from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue from homeassistant.util.event_type import EventType @@ -608,8 +614,8 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): try: return name.format(**translation_placeholders) except KeyError as err: - if get_release_channel() != "stable": - raise HomeAssistantError("Missing placeholder %s" % err) from err + if get_release_channel() is not ReleaseChannel.STABLE: + raise HomeAssistantError(f"Missing placeholder {err}") from err report_issue = async_suggest_report_issue( self.hass, integration_domain=domain ) @@ -963,12 +969,16 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): tuple(conn) # type: ignore[misc] for conn in device["connections"] }, - disabled_by=DeviceEntryDisabler(device["disabled_by"]) - if device["disabled_by"] - else None, - entry_type=DeviceEntryType(device["entry_type"]) - if device["entry_type"] - else None, + disabled_by=( + DeviceEntryDisabler(device["disabled_by"]) + if device["disabled_by"] + else None + ), + entry_type=( + DeviceEntryType(device["entry_type"]) + if device["entry_type"] + else None + ), hw_version=device["hw_version"], id=device["id"], identifiers={ diff --git a/homeassistant/helpers/dispatcher.py b/homeassistant/helpers/dispatcher.py index 
c1194c7da01..aa8176a1b83 100644 --- a/homeassistant/helpers/dispatcher.py +++ b/homeassistant/helpers/dispatcher.py @@ -7,7 +7,12 @@ from functools import partial import logging from typing import Any, TypeVarTuple, overload -from homeassistant.core import HassJob, HomeAssistant, callback +from homeassistant.core import ( + HassJob, + HomeAssistant, + callback, + get_hassjob_callable_job_type, +) from homeassistant.loader import bind_hass from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.logging import catch_log_exception @@ -161,9 +166,13 @@ def _generate_job( signal: SignalType[*_Ts] | str, target: Callable[[*_Ts], Any] | Callable[..., Any] ) -> HassJob[..., None | Coroutine[Any, Any, None]]: """Generate a HassJob for a signal and target.""" + job_type = get_hassjob_callable_job_type(target) return HassJob( - catch_log_exception(target, partial(_format_err, signal, target)), + catch_log_exception( + target, partial(_format_err, signal, target), job_type=job_type + ), f"dispatcher {signal}", + job_type=job_type, ) @@ -190,6 +199,9 @@ def async_dispatcher_send( This method must be run in the event loop. """ + if hass.config.debug: + hass.verify_event_loop_thread("async_dispatcher_send") + if (maybe_dispatchers := hass.data.get(DATA_DISPATCHER)) is None: return dispatchers: _DispatcherDataType[*_Ts] = maybe_dispatchers diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 20948a7130a..a2fc16f8a82 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -52,6 +52,7 @@ from homeassistant.core import ( Event, HassJobType, HomeAssistant, + ReleaseChannel, callback, get_hassjob_callable_job_type, get_release_channel, @@ -520,6 +521,7 @@ class Entity( # While not purely typed, it makes typehinting more useful for us # and removes the need for constant None checks or asserts. 
_state_info: StateInfo = None # type: ignore[assignment] + _is_custom_component: bool = False __capabilities_updated_at: deque[float] __capabilities_updated_at_reported: bool = False @@ -657,8 +659,8 @@ class Entity( return name.format(**self.translation_placeholders) except KeyError as err: if not self._name_translation_placeholders_reported: - if get_release_channel() != "stable": - raise HomeAssistantError("Missing placeholder %s" % err) from err + if get_release_channel() is not ReleaseChannel.STABLE: + raise HomeAssistantError(f"Missing placeholder {err}") from err report_issue = self._suggest_report_issue() _LOGGER.warning( ( @@ -966,8 +968,8 @@ class Entity( self._async_write_ha_state() @callback - def async_write_ha_state(self) -> None: - """Write the state to the state machine.""" + def _async_verify_state_writable(self) -> None: + """Verify the entity is in a writable state.""" if self.hass is None: raise RuntimeError(f"Attribute hass is None for {self}") @@ -992,6 +994,18 @@ class Entity( f"No entity id specified for entity {self.name}" ) + @callback + def _async_write_ha_state_from_call_soon_threadsafe(self) -> None: + """Write the state to the state machine from the event loop thread.""" + self._async_verify_state_writable() + self._async_write_ha_state() + + @callback + def async_write_ha_state(self) -> None: + """Write the state to the state machine.""" + self._async_verify_state_writable() + if self._is_custom_component or self.hass.config.debug: + self.hass.verify_event_loop_thread("async_write_ha_state") self._async_write_ha_state() def _stringify_state(self, available: bool) -> str: @@ -1218,7 +1232,9 @@ class Entity( f"Entity {self.entity_id} schedule update ha state", ) else: - self.hass.loop.call_soon_threadsafe(self.async_write_ha_state) + self.hass.loop.call_soon_threadsafe( + self._async_write_ha_state_from_call_soon_threadsafe + ) @callback def async_schedule_update_ha_state(self, force_refresh: bool = False) -> None: @@ -1423,10 +1439,12 
@@ class Entity( Not to be extended by integrations. """ + is_custom_component = "custom_components" in type(self).__module__ entity_info: EntityInfo = { "domain": self.platform.platform_name, - "custom_component": "custom_components" in type(self).__module__, + "custom_component": is_custom_component, } + self._is_custom_component = is_custom_component if self.platform.config_entry: entity_info["config_entry"] = self.platform.config_entry.entry_id diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index 4e77df49ea6..436fc5a18de 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -636,7 +636,6 @@ def _validate_item( unique_id, report_issue, ) - return if ( disabled_by and disabled_by is not UNDEFINED diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 7fae0976686..5cffe992c0d 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -3,11 +3,12 @@ from __future__ import annotations import asyncio +from collections import defaultdict from collections.abc import Callable, Coroutine, Iterable, Mapping, Sequence import copy from dataclasses import dataclass from datetime import datetime, timedelta -import functools as ft +from functools import partial, wraps import logging from random import randint import time @@ -161,7 +162,7 @@ def threaded_listener_factory( ) -> Callable[Concatenate[HomeAssistant, _P], CALLBACK_TYPE]: """Convert an async event helper to a threaded one.""" - @ft.wraps(async_factory) + @wraps(async_factory) def factory( hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs ) -> CALLBACK_TYPE: @@ -170,7 +171,7 @@ def threaded_listener_factory( raise TypeError("First parameter needs to be a hass instance") async_remove = run_callback_threadsafe( - hass.loop, ft.partial(async_factory, hass, *args, **kwargs) + hass.loop, partial(async_factory, hass, *args, **kwargs) ).result() def remove() -> None: 
@@ -409,19 +410,16 @@ def _async_track_event( return _remove_empty_listener hass_data = hass.data - callbacks_key = tracker.callbacks_key - - callbacks: dict[str, list[HassJob[[Event[_TypedDictT]], Any]]] | None - if not (callbacks := hass_data.get(callbacks_key)): - callbacks = hass_data[callbacks_key] = {} + callbacks: defaultdict[str, list[HassJob[[Event[_TypedDictT]], Any]]] | None + if not (callbacks := hass_data.get(tracker.callbacks_key)): + callbacks = hass_data[tracker.callbacks_key] = defaultdict(list) listeners_key = tracker.listeners_key - - if listeners_key not in hass_data: - hass_data[listeners_key] = hass.bus.async_listen( + if tracker.listeners_key not in hass_data: + hass_data[tracker.listeners_key] = hass.bus.async_listen( tracker.event_type, - ft.partial(tracker.dispatcher_callable, hass, callbacks), - event_filter=ft.partial(tracker.filter_callable, hass, callbacks), + partial(tracker.dispatcher_callable, hass, callbacks), + event_filter=partial(tracker.filter_callable, hass, callbacks), ) job = HassJob(action, f"track {tracker.event_type} event {keys}", job_type=job_type) @@ -432,19 +430,13 @@ def _async_track_event( # here because this function gets called ~20000 times # during startup, and we want to avoid the overhead of # creating empty lists and throwing them away. 
- if callback_list := callbacks.get(keys): - callback_list.append(job) - else: - callbacks[keys] = [job] + callbacks[keys].append(job) keys = [keys] else: for key in keys: - if callback_list := callbacks.get(key): - callback_list.append(job) - else: - callbacks[key] = [job] + callbacks[key].append(job) - return ft.partial(_remove_listener, hass, listeners_key, keys, job, callbacks) + return partial(_remove_listener, hass, listeners_key, keys, job, callbacks) @callback diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index d86fec3de43..068a12c0598 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -136,6 +136,7 @@ def report( error_if_core: bool = True, level: int = logging.WARNING, log_custom_component_only: bool = False, + error_if_integration: bool = False, ) -> None: """Report incorrect usage. @@ -153,14 +154,19 @@ def report( _LOGGER.warning(msg, stack_info=True) return - if not log_custom_component_only or integration_frame.custom_integration: - _report_integration(what, integration_frame, level) + if ( + error_if_integration + or not log_custom_component_only + or integration_frame.custom_integration + ): + _report_integration(what, integration_frame, level, error_if_integration) def _report_integration( what: str, integration_frame: IntegrationFrame, level: int = logging.WARNING, + error: bool = False, ) -> None: """Report incorrect usage in an integration. 
@@ -168,7 +174,7 @@ def _report_integration( """ # Keep track of integrations already reported to prevent flooding key = f"{integration_frame.filename}:{integration_frame.line_number}" - if key in _REPORTED_INTEGRATIONS: + if not error and key in _REPORTED_INTEGRATIONS: return _REPORTED_INTEGRATIONS.add(key) @@ -180,11 +186,11 @@ def _report_integration( integration_domain=integration_frame.integration, module=integration_frame.module, ) - + integration_type = "custom " if integration_frame.custom_integration else "" _LOGGER.log( level, "Detected that %sintegration '%s' %s at %s, line %s: %s, please %s", - "custom " if integration_frame.custom_integration else "", + integration_type, integration_frame.integration, what, integration_frame.relative_filename, @@ -192,6 +198,15 @@ def _report_integration( integration_frame.line, report_issue, ) + if not error: + return + raise RuntimeError( + f"Detected that {integration_type}integration " + f"'{integration_frame.integration}' {what} at " + f"{integration_frame.relative_filename}, line " + f"{integration_frame.line_number}: {integration_frame.line}. " + f"Please {report_issue}." 
+ ) def warn_use(func: _CallableT, what: str) -> _CallableT: diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index 6e8fa8dc3a3..d5891973e40 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -122,6 +122,7 @@ def get_url( require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, + require_cloud: bool = False, allow_internal: bool = True, allow_external: bool = True, allow_cloud: bool = True, @@ -145,7 +146,7 @@ def get_url( # Try finding an URL in the order specified for url_type in order: - if allow_internal and url_type == TYPE_URL_INTERNAL: + if allow_internal and url_type == TYPE_URL_INTERNAL and not require_cloud: with suppress(NoURLAvailableError): return _get_internal_url( hass, @@ -155,7 +156,7 @@ def get_url( require_standard_port=require_standard_port, ) - if allow_external and url_type == TYPE_URL_EXTERNAL: + if require_cloud or (allow_external and url_type == TYPE_URL_EXTERNAL): with suppress(NoURLAvailableError): return _get_external_url( hass, @@ -165,7 +166,10 @@ def get_url( require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, + require_cloud=require_cloud, ) + if require_cloud: + raise NoURLAvailableError # For current request, we accept loopback interfaces (e.g., 127.0.0.1), # the Supervisor hostname and localhost transparently @@ -263,8 +267,12 @@ def _get_external_url( require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, + require_cloud: bool = False, ) -> str: """Get external URL of this instance.""" + if require_cloud: + return _get_cloud_url(hass, require_current_request=require_current_request) + if prefer_cloud and allow_cloud: with suppress(NoURLAvailableError): return _get_cloud_url(hass) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index ea5cc3e571a..d925bf215ab 100644 --- 
a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -650,6 +650,12 @@ class _ScriptRun: # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): self._variables["wait"]["completed"] = True + self._changed() + return + + if timeout == 0: + self._changed() + self._async_handle_timeout() return futures, timeout_handle, timeout_future = self._async_futures_with_timeout( @@ -778,7 +784,7 @@ class _ScriptRun: ) trace_set_result(event=self._action[CONF_EVENT], event_data=event_data) - self._hass.bus.async_fire( + self._hass.bus.async_fire_internal( self._action[CONF_EVENT], event_data, context=self._context ) @@ -1078,6 +1084,11 @@ class _ScriptRun: self._variables["wait"] = {"remaining": timeout, "trigger": None} trace_set_result(wait=self._variables["wait"]) + if timeout == 0: + self._changed() + self._async_handle_timeout() + return + futures, timeout_handle, timeout_future = self._async_futures_with_timeout( timeout ) @@ -1108,6 +1119,14 @@ class _ScriptRun: futures, timeout_handle, timeout_future, remove_triggers ) + def _async_handle_timeout(self) -> None: + """Handle timeout.""" + self._variables["wait"]["remaining"] = 0.0 + if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): + self._log(_TIMEOUT_MSG) + trace_set_result(wait=self._variables["wait"], timeout=True) + raise _AbortScript from TimeoutError() + async def _async_wait_with_optional_timeout( self, futures: list[asyncio.Future[None]], @@ -1118,11 +1137,7 @@ class _ScriptRun: try: await asyncio.wait(futures, return_when=asyncio.FIRST_COMPLETED) if timeout_future and timeout_future.done(): - self._variables["wait"]["remaining"] = 0.0 - if not self._action.get(CONF_CONTINUE_ON_TIMEOUT, True): - self._log(_TIMEOUT_MSG) - trace_set_result(wait=self._variables["wait"], timeout=True) - raise _AbortScript from TimeoutError() + self._async_handle_timeout() finally: if timeout_future and not timeout_future.done() and timeout_handle: 
timeout_handle.cancel() diff --git a/homeassistant/helpers/service_info/mqtt.py b/homeassistant/helpers/service_info/mqtt.py index 172a5eeff33..b683745e1c0 100644 --- a/homeassistant/helpers/service_info/mqtt.py +++ b/homeassistant/helpers/service_info/mqtt.py @@ -1,7 +1,6 @@ """MQTT Discovery data.""" from dataclasses import dataclass -import datetime as dt from homeassistant.data_entry_flow import BaseServiceInfo @@ -17,4 +16,4 @@ class MqttServiceInfo(BaseServiceInfo): qos: int retain: bool subscribed_topic: str - timestamp: dt.datetime + timestamp: float diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 1f0742e896d..c12494ba71b 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -9,7 +9,7 @@ import collections.abc from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager, suppress from contextvars import ContextVar -from datetime import datetime, timedelta +from datetime import date, datetime, time, timedelta from functools import cache, lru_cache, partial, wraps import json import logging @@ -695,6 +695,8 @@ class Template: **kwargs: Any, ) -> RenderInfo: """Render the template and collect an entity filter.""" + if self.hass and self.hass.config.debug: + self.hass.verify_event_loop_thread("async_render_to_info") self._renders += 1 assert self.hass and _render_info.get() is None @@ -1347,8 +1349,8 @@ def device_id(hass: HomeAssistant, entity_id_or_device_name: str) -> str | None: dev_reg = device_registry.async_get(hass) return next( ( - id - for id, device in dev_reg.devices.items() + device_id + for device_id, device in dev_reg.devices.items() if (name := device.name_by_user or device.name) and (str(entity_id_or_device_name) == name) ), @@ -2001,12 +2003,12 @@ def square_root(value, default=_SENTINEL): def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True, default=_SENTINEL): """Filter to convert given timestamp to 
format.""" try: - date = dt_util.utc_from_timestamp(value) + result = dt_util.utc_from_timestamp(value) if local: - date = dt_util.as_local(date) + result = dt_util.as_local(result) - return date.strftime(date_format) + return result.strftime(date_format) except (ValueError, TypeError): # If timestamp can't be converted if default is _SENTINEL: @@ -2048,6 +2050,12 @@ def forgiving_as_timestamp(value, default=_SENTINEL): def as_datetime(value: Any, default: Any = _SENTINEL) -> Any: """Filter and to convert a time string or UNIX timestamp to datetime object.""" + # Return datetime.datetime object without changes + if type(value) is datetime: + return value + # Add midnight to datetime.date object + if type(value) is date: + return datetime.combine(value, time(0, 0, 0)) try: # Check for a valid UNIX timestamp string, int or float timestamp = float(value) @@ -2468,10 +2476,15 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: The age can be in second, minute, hour, day, month or year. Only the biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will be returned. - Make sure date is not in the future, or else it will return None. + If the input datetime is in the future, + the input datetime will be returned. If the input are not a datetime object the input will be returned unmodified. + + Note: This template function is deprecated in favor of `time_until`, but is still + supported so as not to break old templates. """ + if (render_info := _render_info.get()) is not None: render_info.has_time = True @@ -2484,6 +2497,50 @@ def relative_time(hass: HomeAssistant, value: Any) -> Any: return dt_util.get_age(value) +def time_since(hass: HomeAssistant, value: Any | datetime, precision: int = 1) -> Any: + """Take a datetime and return its "age" as a string. + + The age can be in seconds, minutes, hours, days, months and year. + + precision is the number of units to return, with the last unit rounded. 
+ + If the value not a datetime object the input will be returned unmodified. + """ + if (render_info := _render_info.get()) is not None: + render_info.has_time = True + + if not isinstance(value, datetime): + return value + if not value.tzinfo: + value = dt_util.as_local(value) + if dt_util.now() < value: + return value + + return dt_util.get_age(value, precision) + + +def time_until(hass: HomeAssistant, value: Any | datetime, precision: int = 1) -> Any: + """Take a datetime and return the amount of time until that time as a string. + + The time until can be in seconds, minutes, hours, days, months and years. + + precision is the number of units to return, with the last unit rounded. + + If the value not a datetime object the input will be returned unmodified. + """ + if (render_info := _render_info.get()) is not None: + render_info.has_time = True + + if not isinstance(value, datetime): + return value + if not value.tzinfo: + value = dt_util.as_local(value) + if dt_util.now() > value: + return value + + return dt_util.get_time_remaining(value, precision) + + def urlencode(value): """Urlencode dictionary and return as UTF-8 string.""" return urllib_urlencode(value).encode("utf-8") @@ -2882,6 +2939,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): "floor_id", "floor_name", "relative_time", + "time_since", + "time_until", "today_at", "label_id", "label_name", @@ -2938,6 +2997,10 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["now"] = hassfunction(now) self.globals["relative_time"] = hassfunction(relative_time) self.filters["relative_time"] = self.globals["relative_time"] + self.globals["time_since"] = hassfunction(time_since) + self.filters["time_since"] = self.globals["time_since"] + self.globals["time_until"] = hassfunction(time_until) + self.filters["time_until"] = self.globals["time_until"] self.globals["today_at"] = hassfunction(today_at) self.filters["today_at"] = self.globals["today_at"] diff --git 
a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 7f134b1a93d..442db45e714 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -1,7 +1,7 @@ # Automatically generated by gen_requirements_all.py, do not edit aiodhcpwatcher==1.0.0 -aiodiscover==2.0.0 +aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-url-dispatcher==0.3.0 aiohttp-isal==0.2.0 @@ -17,8 +17,8 @@ awesomeversion==24.2.0 bcrypt==4.1.2 bleak-retry-connector==3.5.0 bleak==0.21.1 -bluetooth-adapters==0.18.0 -bluetooth-auto-recovery==1.4.1 +bluetooth-adapters==0.19.0 +bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.19.0 cached_ipaddress==0.3.0 certifi>=2021.5.30 @@ -32,14 +32,14 @@ habluetooth==2.8.0 hass-nabucasa==0.78.0 hassil==1.6.1 home-assistant-bluetooth==1.12.0 -home-assistant-frontend==20240404.2 -home-assistant-intents==2024.4.3 +home-assistant-frontend==20240424.1 +home-assistant-intents==2024.4.24 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.3 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.1 +orjson==3.9.15 packaging>=23.1 paho-mqtt==1.6.1 Pillow==10.3.0 diff --git a/homeassistant/requirements.py b/homeassistant/requirements.py index e78398ebf03..e282ced90ac 100644 --- a/homeassistant/requirements.py +++ b/homeassistant/requirements.py @@ -122,6 +122,11 @@ def _install_requirements_if_missing( return installed, failures +def _set_result_unless_done(future: asyncio.Future[None]) -> None: + if not future.done(): + future.set_result(None) + + class RequirementsManager: """Manage requirements.""" @@ -144,16 +149,13 @@ class RequirementsManager: is invalid, RequirementNotFound if there was some type of failure to install requirements. 
""" - if done is None: done = {domain} else: done.add(domain) - integration = await async_get_integration(self.hass, domain) - if self.hass.config.skip_pip: - return integration + return await async_get_integration(self.hass, domain) cache = self.integrations_with_reqs int_or_fut = cache.get(domain, UNDEFINED) @@ -170,19 +172,19 @@ class RequirementsManager: if int_or_fut is not UNDEFINED: return cast(Integration, int_or_fut) - event = cache[domain] = self.hass.loop.create_future() + future = cache[domain] = self.hass.loop.create_future() try: + integration = await async_get_integration(self.hass, domain) await self._async_process_integration(integration, done) except Exception: del cache[domain] - if not event.done(): - event.set_result(None) raise + finally: + _set_result_unless_done(future) cache[domain] = integration - if not event.done(): - event.set_result(None) + _set_result_unless_done(future) return integration async def _async_process_integration( diff --git a/homeassistant/runner.py b/homeassistant/runner.py index f036c7d6322..4e2326d4ea7 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -107,6 +107,7 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): def new_event_loop(self) -> asyncio.AbstractEventLoop: """Get the event loop.""" loop: asyncio.AbstractEventLoop = super().new_event_loop() + setattr(loop, "_thread_ident", threading.get_ident()) loop.set_exception_handler(_async_loop_exception_handler) if self.debug: loop.set_debug(True) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 5772fce6955..fab70e31d9d 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -459,7 +459,9 @@ async def _async_setup_component( # Cleanup hass.data[DATA_SETUP].pop(domain, None) - hass.bus.async_fire(EVENT_COMPONENT_LOADED, EventComponentLoaded(component=domain)) + hass.bus.async_fire_internal( + EVENT_COMPONENT_LOADED, EventComponentLoaded(component=domain) + ) return True diff --git 
a/homeassistant/util/async_.py b/homeassistant/util/async_.py index 0cf9fc992c5..19c20207e1d 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -52,8 +52,7 @@ def run_callback_threadsafe( Return a concurrent.futures.Future to access the result. """ - ident = loop.__dict__.get("_thread_ident") - if ident is not None and ident == threading.get_ident(): + if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident(): raise RuntimeError("Cannot be called from within the event loop") future: concurrent.futures.Future[_T] = concurrent.futures.Future() diff --git a/homeassistant/util/dt.py b/homeassistant/util/dt.py index 2f2b415144f..923838a48a5 100644 --- a/homeassistant/util/dt.py +++ b/homeassistant/util/dt.py @@ -286,36 +286,78 @@ def parse_time(time_str: str) -> dt.time | None: return None -def get_age(date: dt.datetime) -> str: - """Take a datetime and return its "age" as a string. - - The age can be in second, minute, hour, day, month or year. Only the - biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will - be returned. - Make sure date is not in the future, or else it won't work. 
- """ +def _get_timestring(timediff: float, precision: int = 1) -> str: + """Return a string representation of a time diff.""" def formatn(number: int, unit: str) -> str: """Add "unit" if it's plural.""" if number == 1: - return f"1 {unit}" - return f"{number:d} {unit}s" + return f"1 {unit} " + return f"{number:d} {unit}s " + + if timediff == 0.0: + return "0 seconds" + + units = ("year", "month", "day", "hour", "minute", "second") + + factors = (365 * 24 * 60 * 60, 30 * 24 * 60 * 60, 24 * 60 * 60, 60 * 60, 60, 1) + + result_string: str = "" + current_precision = 0 + + for i, current_factor in enumerate(factors): + selected_unit = units[i] + if timediff < current_factor: + continue + current_precision = current_precision + 1 + if current_precision == precision: + return ( + result_string + formatn(round(timediff / current_factor), selected_unit) + ).rstrip() + curr_diff = int(timediff // current_factor) + result_string += formatn(curr_diff, selected_unit) + timediff -= (curr_diff) * current_factor + + return result_string.rstrip() + + +def get_age(date: dt.datetime, precision: int = 1) -> str: + """Take a datetime and return its "age" as a string. + + The age can be in second, minute, hour, day, month and year. + + depth number of units will be returned, with the last unit rounded + + The date must be in the past or a ValueException will be raised. 
+ """ delta = (now() - date).total_seconds() + rounded_delta = round(delta) - units = ["second", "minute", "hour", "day", "month"] - factors = [60, 60, 24, 30, 12] - selected_unit = "year" + if rounded_delta < 0: + raise ValueError("Time value is in the future") + return _get_timestring(rounded_delta, precision) - for i, next_factor in enumerate(factors): - if rounded_delta < next_factor: - selected_unit = units[i] - break - delta /= next_factor - rounded_delta = round(delta) - return formatn(rounded_delta, selected_unit) +def get_time_remaining(date: dt.datetime, precision: int = 1) -> str: + """Take a datetime and return its "age" as a string. + + The age can be in second, minute, hour, day, month and year. + + depth number of units will be returned, with the last unit rounded + + The date must be in the future or a ValueException will be raised. + """ + + delta = (date - now()).total_seconds() + + rounded_delta = round(delta) + + if rounded_delta < 0: + raise ValueError("Time value is in the past") + + return _get_timestring(rounded_delta, precision) def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> list[int]: diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index 8709186face..ab163578846 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from collections.abc import Callable, Coroutine from functools import partial, wraps import inspect @@ -12,7 +11,12 @@ import queue import traceback from typing import Any, TypeVar, TypeVarTuple, cast, overload -from homeassistant.core import HomeAssistant, callback, is_callback +from homeassistant.core import ( + HassJobType, + HomeAssistant, + callback, + get_hassjob_callable_job_type, +) _T = TypeVar("_T") _Ts = TypeVarTuple("_Ts") @@ -129,34 +133,38 @@ def _callback_wrapper( @overload def catch_log_exception( - func: Callable[[*_Ts], Coroutine[Any, Any, Any]], format_err: 
Callable[[*_Ts], Any] + func: Callable[[*_Ts], Coroutine[Any, Any, Any]], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], Coroutine[Any, Any, None]]: ... @overload def catch_log_exception( - func: Callable[[*_Ts], Any], format_err: Callable[[*_Ts], Any] + func: Callable[[*_Ts], Any], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], None] | Callable[[*_Ts], Coroutine[Any, Any, None]]: ... def catch_log_exception( - func: Callable[[*_Ts], Any], format_err: Callable[[*_Ts], Any] + func: Callable[[*_Ts], Any], + format_err: Callable[[*_Ts], Any], + job_type: HassJobType | None = None, ) -> Callable[[*_Ts], None] | Callable[[*_Ts], Coroutine[Any, Any, None]]: """Decorate a function func to catch and log exceptions. If func is a coroutine function, a coroutine function will be returned. If func is a callback, a callback will be returned. """ - # Check for partials to properly determine if coroutine function - check_func = func - while isinstance(check_func, partial): - check_func = check_func.func # type: ignore[unreachable] # false positive + if job_type is None: + job_type = get_hassjob_callable_job_type(func) - if asyncio.iscoroutinefunction(check_func): + if job_type is HassJobType.Coroutinefunction: async_func = cast(Callable[[*_Ts], Coroutine[Any, Any, None]], func) return wraps(async_func)(partial(_async_wrapper, async_func, format_err)) # type: ignore[return-value] - if is_callback(check_func): + if job_type is HassJobType.Callback: return wraps(func)(partial(_callback_wrapper, func, format_err)) # type: ignore[return-value] return wraps(func)(partial(_sync_wrapper, func, format_err)) # type: ignore[return-value] diff --git a/homeassistant/util/uuid.py b/homeassistant/util/uuid.py index d924eab934d..b7e9c2ae4f8 100644 --- a/homeassistant/util/uuid.py +++ b/homeassistant/util/uuid.py @@ -9,4 +9,4 @@ def random_uuid_hex() -> str: This uuid should not be used for 
cryptographically secure operations. """ - return "%032x" % getrandbits(32 * 4) + return f"{getrandbits(32 * 4):032x}" diff --git a/mypy.ini b/mypy.ini index 216d43322a4..611dd176fbf 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2112,6 +2112,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.husqvarna_automower.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.hydrawise.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index 4b3b15f7bde..d3f2af6bbf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.5.0.dev0" +version = "2024.6.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" @@ -53,7 +53,7 @@ dependencies = [ "cryptography==42.0.5", "Pillow==10.3.0", "pyOpenSSL==24.1.0", - "orjson==3.10.1", + "orjson==3.9.15", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", @@ -251,7 +251,7 @@ disable = [ "nested-min-max", # PLW3301 "pointless-statement", # B018 "raise-missing-from", # B904 - # "redefined-builtin", # A001, ruff is way more stricter, needs work + "redefined-builtin", # A001 "try-except-raise", # TRY302 "unused-argument", # ARG001, we don't use it "unused-format-string-argument", #F507 @@ -659,10 +659,11 @@ filterwarnings = [ ] [tool.ruff] -required-version = ">=0.3.7" +required-version = ">=0.4.2" [tool.ruff.lint] select = [ + "A001", # Variable {name} is shadowing a Python builtin "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body @@ -704,6 +705,7 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF013", # PEP 484 prohibits implicit Optional "RUF018", # Avoid assignment expressions in assert statements + "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up "S102", # Use of exec detected "S103", # bad-file-permissions diff --git a/requirements.txt b/requirements.txt index 34ee8237921..44c60aec07a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,7 +28,7 @@ PyJWT==2.8.0 cryptography==42.0.5 Pillow==10.3.0 pyOpenSSL==24.1.0 -orjson==3.10.1 +orjson==3.9.15 packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 diff --git a/requirements_all.txt b/requirements_all.txt index b4c81ac30de..011b3b60d4f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -45,7 +45,7 @@ Mastodon.py==1.8.1 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw 
ProgettiHWSW==0.1.3 @@ -140,7 +140,7 @@ TwitterAPI==2.7.12 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.1 +accuweather==3.0.0 # homeassistant.components.adax adax==0.4.0 @@ -204,7 +204,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.3.4 +aioautomower==2024.4.4 # homeassistant.components.azure_devops aioazuredevops==2.0.0 @@ -222,7 +222,7 @@ aiocomelit==0.9.0 aiodhcpwatcher==1.0.0 # homeassistant.components.dhcp -aiodiscover==2.0.0 +aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.1.0 +aioesphomeapi==24.3.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -318,7 +318,7 @@ aioopenexchangerates==0.4.0 aiooui==0.1.5 # homeassistant.components.pegel_online -aiopegelonline==0.0.9 +aiopegelonline==0.0.10 # homeassistant.components.acmeda aiopulse==0.4.4 @@ -367,6 +367,9 @@ aioskybell==22.7.0 # homeassistant.components.slimproto aioslimproto==3.0.0 +# homeassistant.components.solaredge +aiosolaredge==0.2.0 + # homeassistant.components.steamist aiosteamist==0.3.2 @@ -383,7 +386,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==76 # homeassistant.components.vlc_telnet aiovlc==0.1.0 @@ -538,7 +541,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.zha -bellows==0.38.1 +bellows==0.38.2 # homeassistant.components.bmw_connected_drive bimmer-connected[china]==0.14.6 @@ -576,10 +579,10 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.18.0 +bluetooth-adapters==0.19.0 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.1 +bluetooth-auto-recovery==1.4.2 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble @@ -694,7 +697,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs 
-deebot-client==6.0.2 +deebot-client==7.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -735,7 +738,7 @@ dovado==0.4.1 dremel3dpy==2.1.1 # homeassistant.components.drop_connect -dropmqttapi==1.0.2 +dropmqttapi==1.0.3 # homeassistant.components.dsmr dsmr-parser==1.3.1 @@ -806,6 +809,9 @@ env-canada==0.6.0 # homeassistant.components.season ephem==4.1.5 +# homeassistant.components.epic_games_store +epicstore-api==0.1.7 + # homeassistant.components.epion epion==0.0.3 @@ -934,7 +940,7 @@ georss-qld-bushfire-alert-client==0.7 getmac==0.9.4 # homeassistant.components.gios -gios==3.2.2 +gios==4.0.0 # homeassistant.components.gitter gitterpy==0.1.7 @@ -946,7 +952,7 @@ glances-api==0.6.0 goalzero==0.2.2 # homeassistant.components.goodwe -goodwe==0.2.32 +goodwe==0.3.2 # homeassistant.components.google_mail # homeassistant.components.google_tasks @@ -974,7 +980,7 @@ goslide-api==0.5.1 gotailwind==0.2.2 # homeassistant.components.govee_ble -govee-ble==0.31.0 +govee-ble==0.31.2 # homeassistant.components.govee_light_local govee-local-api==1.4.4 @@ -1020,7 +1026,7 @@ ha-av==10.1.1 ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt -ha-iotawattpy==0.1.1 +ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.1.1 @@ -1069,13 +1075,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.46 +holidays==0.47 # homeassistant.components.frontend -home-assistant-frontend==20240404.2 +home-assistant-frontend==20240424.1 # homeassistant.components.conversation -home-assistant-intents==2024.4.3 +home-assistant-intents==2024.4.24 # homeassistant.components.home_connect homeconnect==0.7.2 @@ -1113,7 +1119,7 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 @@ -1329,7 +1335,7 @@ motionblindsble==0.0.9 motioneye-client==0.3.14 # 
homeassistant.components.bang_olufsen -mozart-api==3.2.1.150.6 +mozart-api==3.4.1.8.5 # homeassistant.components.mullvad mullvad-api==1.0.0 @@ -1362,7 +1368,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.2 +nettigo-air-monitor==3.0.0 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1377,7 +1383,7 @@ nextcloudmonitor==1.5.0 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==2.1.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump nibe==2.8.0 @@ -1492,7 +1498,7 @@ orvibo==1.1.2 ourgroceries==1.5.4 # homeassistant.components.ovo_energy -ovoenergy==1.3.1 +ovoenergy==2.0.0 # homeassistant.components.p1_monitor p1monitor==3.0.0 @@ -1542,7 +1548,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.1 +plugwise==0.37.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1655,7 +1661,7 @@ pyEmby==1.9 pyHik==0.3.2 # homeassistant.components.rfxtrx -pyRFXtrx==0.31.0 +pyRFXtrx==0.31.1 # homeassistant.components.sony_projector pySDCP==1 @@ -1812,7 +1818,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.7 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 @@ -1833,7 +1839,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.10 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 @@ -1941,7 +1947,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.11 +pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 @@ -2090,7 +2096,7 @@ pyrecswitch==1.0.2 pyrepetierng==0.1.0 # homeassistant.components.risco -pyrisco==0.6.0 +pyrisco==0.6.1 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 @@ -2574,9 +2580,6 @@ soco==0.30.3 # homeassistant.components.solaredge_local solaredge-local==0.2.3 -# homeassistant.components.solaredge -solaredge==0.0.2 - # 
homeassistant.components.solax solax==3.1.0 @@ -2734,7 +2737,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2023.2 +total-connect-client==2023.12.1 # homeassistant.components.tplink_lte tp-connected==0.0.4 @@ -2869,7 +2872,7 @@ wirelesstagpy==0.8.1 wled==0.17.0 # homeassistant.components.wolflink -wolf-comm==0.0.6 +wolf-comm==0.0.7 # homeassistant.components.wyoming wyoming==1.5.3 @@ -2914,7 +2917,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.2 +yolink-api==0.4.3 # homeassistant.components.youless youless-api==1.0.1 @@ -2938,7 +2941,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.114 +zha-quirks==0.0.115 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 @@ -2959,7 +2962,7 @@ zigpy-zigate==0.12.0 zigpy-znp==0.12.1 # homeassistant.components.zha -zigpy==0.63.5 +zigpy==0.64.0 # homeassistant.components.zoneminder zm-py==0.5.4 diff --git a/requirements_test.txt b/requirements_test.txt index f13e0e6a36b..7fa9b3d8c89 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -8,15 +8,15 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt astroid==3.1.0 -coverage==7.4.4 +coverage==7.5.0 freezegun==1.4.0 mock-open==1.4.0 -mypy-dev==1.10.0a3 +mypy==1.10.0 pre-commit==3.7.0 pydantic==1.10.12 pylint==3.1.0 pylint-per-file-ignores==1.3.2 -pipdeptree==2.16.1 +pipdeptree==2.17.0 pytest-asyncio==0.23.6 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 @@ -29,7 +29,7 @@ pytest-unordered==0.6.0 pytest-picked==0.5.0 pytest-xdist==3.5.0 pytest==8.1.1 -requests-mock==1.11.0 +requests-mock==1.12.1 respx==0.21.0 syrupy==4.6.1 tqdm==4.66.2 @@ -50,4 +50,4 @@ types-pytz==2024.1.0.20240203 types-PyYAML==6.0.12.20240311 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 -uv==0.1.27 +uv==0.1.35 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0ba05afc18c..fffc9d9b2c1 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -39,7 +39,7 @@ HATasmota==0.8.0 Pillow==10.3.0 # homeassistant.components.plex -PlexAPI==4.15.11 +PlexAPI==4.15.12 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -119,7 +119,7 @@ Tami4EdgeAPI==2.1 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==2.1.1 +accuweather==3.0.0 # homeassistant.components.adax adax==0.4.0 @@ -183,7 +183,7 @@ aioaseko==0.1.1 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.3.4 +aioautomower==2024.4.4 # homeassistant.components.azure_devops aioazuredevops==2.0.0 @@ -201,7 +201,7 @@ aiocomelit==0.9.0 aiodhcpwatcher==1.0.0 # homeassistant.components.dhcp -aiodiscover==2.0.0 +aiodiscover==2.1.0 # homeassistant.components.dnsip aiodns==3.2.0 @@ -222,7 +222,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.1.0 +aioesphomeapi==24.3.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -291,7 +291,7 @@ aioopenexchangerates==0.4.0 aiooui==0.1.5 # homeassistant.components.pegel_online -aiopegelonline==0.0.9 +aiopegelonline==0.0.10 # homeassistant.components.acmeda aiopulse==0.4.4 @@ -340,6 +340,9 @@ aioskybell==22.7.0 # homeassistant.components.slimproto aioslimproto==3.0.0 +# homeassistant.components.solaredge +aiosolaredge==0.2.0 + # homeassistant.components.steamist aiosteamist==0.3.2 @@ -356,7 +359,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.5.6 # homeassistant.components.unifi -aiounifi==74 +aiounifi==76 # homeassistant.components.vlc_telnet aiovlc==0.1.0 @@ -463,7 +466,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.zha -bellows==0.38.1 +bellows==0.38.2 # homeassistant.components.bmw_connected_drive bimmer-connected[china]==0.14.6 @@ -491,10 +494,10 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.18.0 +bluetooth-adapters==0.19.0 # homeassistant.components.bluetooth 
-bluetooth-auto-recovery==1.4.1 +bluetooth-auto-recovery==1.4.2 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble @@ -572,7 +575,7 @@ dbus-fast==2.21.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==6.0.2 +deebot-client==7.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -607,7 +610,7 @@ discovery30303==0.2.1 dremel3dpy==2.1.1 # homeassistant.components.drop_connect -dropmqttapi==1.0.2 +dropmqttapi==1.0.3 # homeassistant.components.dsmr dsmr-parser==1.3.1 @@ -660,6 +663,9 @@ env-canada==0.6.0 # homeassistant.components.season ephem==4.1.5 +# homeassistant.components.epic_games_store +epicstore-api==0.1.7 + # homeassistant.components.epion epion==0.0.3 @@ -766,7 +772,7 @@ georss-qld-bushfire-alert-client==0.7 getmac==0.9.4 # homeassistant.components.gios -gios==3.2.2 +gios==4.0.0 # homeassistant.components.glances glances-api==0.6.0 @@ -775,7 +781,7 @@ glances-api==0.6.0 goalzero==0.2.2 # homeassistant.components.goodwe -goodwe==0.2.32 +goodwe==0.3.2 # homeassistant.components.google_mail # homeassistant.components.google_tasks @@ -797,7 +803,7 @@ googlemaps==2.5.1 gotailwind==0.2.2 # homeassistant.components.govee_ble -govee-ble==0.31.0 +govee-ble==0.31.2 # homeassistant.components.govee_light_local govee-local-api==1.4.4 @@ -834,7 +840,7 @@ ha-av==10.1.1 ha-ffmpeg==3.2.0 # homeassistant.components.iotawatt -ha-iotawattpy==0.1.1 +ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.1.1 @@ -871,13 +877,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.46 +holidays==0.47 # homeassistant.components.frontend -home-assistant-frontend==20240404.2 +home-assistant-frontend==20240424.1 # homeassistant.components.conversation -home-assistant-intents==2024.4.3 +home-assistant-intents==2024.4.24 # homeassistant.components.home_connect homeconnect==0.7.2 @@ -906,7 +912,7 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # 
homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==7.0.3 +ical==8.0.0 # homeassistant.components.ping icmplib==3.0 @@ -1071,7 +1077,7 @@ motionblindsble==0.0.9 motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==3.2.1.150.6 +mozart-api==3.4.1.8.5 # homeassistant.components.mullvad mullvad-api==1.0.0 @@ -1098,7 +1104,7 @@ nessclient==1.0.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==2.2.2 +nettigo-air-monitor==3.0.0 # homeassistant.components.nexia nexia==2.0.8 @@ -1110,7 +1116,7 @@ nextcloudmonitor==1.5.0 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==2.1.0 +nextdns==3.0.0 # homeassistant.components.nibe_heatpump nibe==2.8.0 @@ -1186,7 +1192,7 @@ oralb-ble==0.17.6 ourgroceries==1.5.4 # homeassistant.components.ovo_energy -ovoenergy==1.3.1 +ovoenergy==2.0.0 # homeassistant.components.p1_monitor p1monitor==3.0.0 @@ -1219,7 +1225,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.37.1 +plugwise==0.37.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1305,7 +1311,7 @@ pyDuotecno==2024.3.2 pyElectra==1.2.0 # homeassistant.components.rfxtrx -pyRFXtrx==0.31.0 +pyRFXtrx==0.31.1 # homeassistant.components.tibber pyTibber==0.28.2 @@ -1411,7 +1417,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.7.7 +pyfibaro==0.7.8 # homeassistant.components.fido pyfido==2.1.2 @@ -1429,7 +1435,7 @@ pyforked-daapd==0.1.14 pyfreedompro==1.1.0 # homeassistant.components.fritzbox -pyfritzhome==0.6.10 +pyfritzhome==0.6.11 # homeassistant.components.ifttt pyfttt==0.3 @@ -1516,7 +1522,7 @@ pylibrespot-java==0.1.1 pylitejet==0.6.2 # homeassistant.components.litterrobot -pylitterbot==2023.4.11 +pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.20.0 @@ -1632,7 +1638,7 @@ pyqwikswitch==0.93 pyrainbird==4.0.2 # homeassistant.components.risco -pyrisco==0.6.0 +pyrisco==0.6.1 # 
homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 @@ -1990,9 +1996,6 @@ snapcast==2.3.6 # homeassistant.components.sonos soco==0.30.3 -# homeassistant.components.solaredge -solaredge==0.0.2 - # homeassistant.components.solax solax==3.1.0 @@ -2111,7 +2114,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2023.2 +total-connect-client==2023.12.1 # homeassistant.components.tplink_omada tplink-omada-client==1.3.12 @@ -2225,7 +2228,7 @@ wiffi==1.1.2 wled==0.17.0 # homeassistant.components.wolflink -wolf-comm==0.0.6 +wolf-comm==0.0.7 # homeassistant.components.wyoming wyoming==1.5.3 @@ -2264,7 +2267,7 @@ yalexs==3.0.1 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.2 +yolink-api==0.4.3 # homeassistant.components.youless youless-api==1.0.1 @@ -2285,7 +2288,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.114 +zha-quirks==0.0.115 # homeassistant.components.zha zigpy-deconz==0.23.1 @@ -2300,7 +2303,7 @@ zigpy-zigate==0.12.0 zigpy-znp==0.12.1 # homeassistant.components.zha -zigpy==0.63.5 +zigpy==0.64.0 # homeassistant.components.zwave_js zwave-js-server-python==0.55.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 46ade953da2..05e98a945d2 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.2.6 -ruff==0.3.7 +ruff==0.4.2 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 7fc0907e756..a5db9997d9d 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -17,7 +17,10 @@ from typing import Any from homeassistant.util.yaml.loader import load_yaml from script.hassfest.model import Integration -COMMENT_REQUIREMENTS = ( +# Requirements which can't be installed on all systems because they rely on 
additional +# system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out +# in requirements_all.txt and requirements_test_all.txt. +EXCLUDED_REQUIREMENTS_ALL = { "atenpdu", # depends on pysnmp which is not maintained at this time "avea", # depends on bluepy "avion", @@ -36,10 +39,39 @@ COMMENT_REQUIREMENTS = ( "pyuserinput", "tensorflow", "tf-models-official", -) +} -COMMENT_REQUIREMENTS_NORMALIZED = { - commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS +# Requirements excluded by EXCLUDED_REQUIREMENTS_ALL which should be included when +# building integration wheels for all architectures. +INCLUDED_REQUIREMENTS_WHEELS = { + "decora-wifi", + "evdev", + "pycups", + "python-gammu", + "pyuserinput", +} + + +# Requirements to exclude or include when running github actions. +# Requirements listed in "exclude" will be commented-out in +# requirements_all_{action}.txt +# Requirements listed in "include" must be listed in EXCLUDED_REQUIREMENTS_CI, and +# will be included in requirements_all_{action}.txt + +OVERRIDDEN_REQUIREMENTS_ACTIONS = { + "pytest": {"exclude": set(), "include": {"python-gammu"}}, + "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + # Pandas has issues building on armhf, it is expected they + # will drop the platform in the near future (they consider it + # "flimsy" on 386). The following packages depend on pandas, + # so we comment them out. 
+ "wheels_armhf": { + "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, + "include": INCLUDED_REQUIREMENTS_WHEELS, + }, + "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -254,6 +286,12 @@ def gather_recursive_requirements( return reqs +def _normalize_package_name(package_name: str) -> str: + """Normalize a package name.""" + # pipdeptree needs lowercase and dash instead of underscore or period as separator + return package_name.lower().replace("_", "-").replace(".", "-") + + def normalize_package_name(requirement: str) -> str: """Return a normalized package name from a requirement string.""" # This function is also used in hassfest. @@ -262,12 +300,24 @@ def normalize_package_name(requirement: str) -> str: return "" # pipdeptree needs lowercase and dash instead of underscore or period as separator - return match.group(1).lower().replace("_", "-").replace(".", "-") + return _normalize_package_name(match.group(1)) def comment_requirement(req: str) -> bool: """Comment out requirement. 
Some don't install on all systems.""" - return normalize_package_name(req) in COMMENT_REQUIREMENTS_NORMALIZED + return normalize_package_name(req) in EXCLUDED_REQUIREMENTS_ALL + + +def process_action_requirement(req: str, action: str) -> str: + """Process requirement for a specific github action.""" + normalized_package_name = normalize_package_name(req) + if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["exclude"]: + return f"# {req}" + if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["include"]: + return req + if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: + return f"# {req}" + return req def gather_modules() -> dict[str, list[str]] | None: @@ -353,6 +403,16 @@ def generate_requirements_list(reqs: dict[str, list[str]]) -> str: return "".join(output) +def generate_action_requirements_list(reqs: dict[str, list[str]], action: str) -> str: + """Generate a pip file based on requirements.""" + output = [] + for pkg, requirements in sorted(reqs.items(), key=itemgetter(0)): + output.extend(f"\n# {req}" for req in sorted(requirements)) + processed_pkg = process_action_requirement(pkg, action) + output.append(f"\n{processed_pkg}\n") + return "".join(output) + + def requirements_output() -> str: """Generate output for requirements.""" output = [ @@ -379,6 +439,18 @@ def requirements_all_output(reqs: dict[str, list[str]]) -> str: return "".join(output) +def requirements_all_action_output(reqs: dict[str, list[str]], action: str) -> str: + """Generate output for requirements_all_{action}.""" + output = [ + f"# Home Assistant Core, full dependency set for {action}\n", + GENERATED_MESSAGE, + "-r requirements.txt\n", + ] + output.append(generate_action_requirements_list(reqs, action)) + + return "".join(output) + + def requirements_test_all_output(reqs: dict[str, list[str]]) -> str: """Generate output for test_requirements.""" output = [ @@ -459,7 +531,7 @@ def diff_file(filename: str, content: str) -> list[str]: ) -def 
main(validate: bool) -> int: +def main(validate: bool, ci: bool) -> int: """Run the script.""" if not os.path.isfile("requirements_all.txt"): print("Run this from HA root dir") @@ -472,17 +544,28 @@ def main(validate: bool) -> int: reqs_file = requirements_output() reqs_all_file = requirements_all_output(data) + reqs_all_action_files = { + action: requirements_all_action_output(data, action) + for action in OVERRIDDEN_REQUIREMENTS_ACTIONS + } reqs_test_all_file = requirements_test_all_output(data) + # Always calling requirements_pre_commit_output is intentional to ensure + # the code is called by the pre-commit hooks. reqs_pre_commit_file = requirements_pre_commit_output() constraints = gather_constraints() - files = ( + files = [ ("requirements.txt", reqs_file), ("requirements_all.txt", reqs_all_file), ("requirements_test_pre_commit.txt", reqs_pre_commit_file), ("requirements_test_all.txt", reqs_test_all_file), ("homeassistant/package_constraints.txt", constraints), - ) + ] + if ci: + files.extend( + (f"requirements_all_{action}.txt", reqs_all_file) + for action, reqs_all_file in reqs_all_action_files.items() + ) if validate: errors = [] @@ -511,4 +594,5 @@ def main(validate: bool) -> int: if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" - sys.exit(main(_VAL)) + _CI = sys.argv[-1] == "ci" + sys.exit(main(_VAL, _CI)) diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index 6fe7700cb3f..66796d4dd0d 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -32,7 +32,11 @@ class ImportCollector(ast.NodeVisitor): self._cur_fil_dir = fil.relative_to(self.integration.path) self.referenced[self._cur_fil_dir] = set() - self.visit(ast.parse(fil.read_text())) + try: + self.visit(ast.parse(fil.read_text())) + except SyntaxError as e: + e.add_note(f"File: {fil}") + raise self._cur_fil_dir = None def _add_reference(self, reference_domain: str) -> None: @@ -148,10 +152,12 @@ IGNORE_VIOLATIONS = { ("demo", 
"manual"), # This would be a circular dep ("http", "network"), + ("http", "cloud"), # This would be a circular dep ("zha", "homeassistant_hardware"), ("zha", "homeassistant_sky_connect"), ("zha", "homeassistant_yellow"), + ("homeassistant_sky_connect", "zha"), # This should become a helper method that integrations can submit data to ("websocket_api", "lovelace"), ("websocket_api", "shopping_list"), diff --git a/script/hassfest/requirements.py b/script/hassfest/requirements.py index ee63bf07f90..2c4ed47b158 100644 --- a/script/hassfest/requirements.py +++ b/script/hassfest/requirements.py @@ -15,13 +15,13 @@ from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from tqdm import tqdm import homeassistant.util.package as pkg_util -from script.gen_requirements_all import COMMENT_REQUIREMENTS, normalize_package_name +from script.gen_requirements_all import ( + EXCLUDED_REQUIREMENTS_ALL, + normalize_package_name, +) from .model import Config, Integration -IGNORE_PACKAGES = { - commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS -} PACKAGE_REGEX = re.compile( r"^(?:--.+\s)?([-_,\.\w\d\[\]]+)(==|>=|<=|~=|!=|<|>|===)*(.*)$" ) @@ -116,7 +116,7 @@ def validate_requirements(integration: Integration) -> None: f"Failed to normalize package name from requirement {req}", ) return - if package in IGNORE_PACKAGES: + if package in EXCLUDED_REQUIREMENTS_ALL: continue integration_requirements.add(req) integration_packages.add(package) diff --git a/tests/common.py b/tests/common.py index b12f0ed37da..7bb16ce5c54 100644 --- a/tests/common.py +++ b/tests/common.py @@ -22,6 +22,7 @@ from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 import pytest +from syrupy import SnapshotAssertion import voluptuous as vol from homeassistant import auth, bootstrap, config_entries, loader @@ -448,10 +449,11 @@ def async_fire_mqtt_message( msg.payload = payload msg.qos = qos msg.retain = retain 
+ msg.timestamp = time.monotonic() mqtt_data: MqttData = hass.data["mqtt"] assert mqtt_data.client - mqtt_data.client._mqtt_handle_message(msg) + mqtt_data.client._async_mqtt_on_message(Mock(), None, msg) fire_mqtt_message = threadsafe_callback_factory(async_fire_mqtt_message) @@ -1733,3 +1735,22 @@ def setup_test_component_platform( mock_platform(hass, f"test.{domain}", platform, built_in=built_in) return platform + + +async def snapshot_platform( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + config_entry_id: str, +) -> None: + """Snapshot a platform.""" + entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id) + assert entity_entries + assert ( + len({entity_entry.domain for entity_entry in entity_entries}) == 1 + ), "Please limit the loaded platforms to 1 platform." + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_entry.disabled_by is None, "Please enable all entities." 
+ assert (state := hass.states.get(entity_entry.entity_id)) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 081e7bf595a..1542d22aa7b 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,158 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': 
'2020-07-30T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 
'wind_speed': 18.5, - }), - ]), - }), - }) -# --- # name: test_forecast_service[get_forecast] dict({ 'forecast': list([ @@ -455,3 +301,67 @@ }), ]) # --- +# name: test_weather[weather.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'weather', + 'entity_category': None, + 'entity_id': 'weather.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '0123456', + 'unit_of_measurement': None, + }) +# --- +# name: test_weather[weather.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'apparent_temperature': 22.8, + 'attribution': 'Data provided by AccuWeather', + 'cloud_coverage': 10, + 'dew_point': 16.2, + 'friendly_name': 'Home', + 'humidity': 67, + 'precipitation_unit': , + 'pressure': 1012.0, + 'pressure_unit': , + 'supported_features': , + 'temperature': 22.6, + 'temperature_unit': , + 'uv_index': 6, + 'visibility': 16.1, + 'visibility_unit': , + 'wind_bearing': 180, + 'wind_gust_speed': 20.3, + 'wind_speed': 14.5, + 'wind_speed_unit': , + }), + 'context': , + 'entity_id': 'weather.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sunny', + }) +# --- diff --git a/tests/components/accuweather/test_sensor.py b/tests/components/accuweather/test_sensor.py index e79e49db96d..127e4d74cd8 100644 --- a/tests/components/accuweather/test_sensor.py +++ b/tests/components/accuweather/test_sensor.py @@ -30,6 +30,7 @@ from tests.common import ( async_fire_time_changed, load_json_array_fixture, load_json_object_fixture, + snapshot_platform, ) @@ -42,14 +43,7 @@ async def 
test_sensor( """Test states of the sensor.""" with patch("homeassistant.components.accuweather.PLATFORMS", [Platform.SENSOR]): entry = await init_integration(hass) - - entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) - - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/accuweather/test_system_health.py b/tests/components/accuweather/test_system_health.py index 6321071eaa5..562c572c830 100644 --- a/tests/components/accuweather/test_system_health.py +++ b/tests/components/accuweather/test_system_health.py @@ -5,6 +5,7 @@ from unittest.mock import Mock from aiohttp import ClientError +from homeassistant.components.accuweather import AccuWeatherData from homeassistant.components.accuweather.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -23,8 +24,10 @@ async def test_accuweather_system_health( await hass.async_block_till_done() hass.data[DOMAIN] = {} - hass.data[DOMAIN]["0123xyz"] = {} - hass.data[DOMAIN]["0123xyz"] = Mock(accuweather=Mock(requests_remaining="42")) + hass.data[DOMAIN]["0123xyz"] = AccuWeatherData( + coordinator_observation=Mock(accuweather=Mock(requests_remaining="42")), + coordinator_daily_forecast=Mock(), + ) info = await get_system_health_info(hass, DOMAIN) @@ -48,8 +51,10 @@ async def test_accuweather_system_health_fail( await hass.async_block_till_done() hass.data[DOMAIN] = {} - hass.data[DOMAIN]["0123xyz"] = {} - hass.data[DOMAIN]["0123xyz"] = Mock(accuweather=Mock(requests_remaining="0")) + hass.data[DOMAIN]["0123xyz"] = AccuWeatherData( + 
coordinator_observation=Mock(accuweather=Mock(requests_remaining="0")), + coordinator_daily_forecast=Mock(), + ) info = await get_system_health_info(hass, DOMAIN) diff --git a/tests/components/accuweather/test_weather.py b/tests/components/accuweather/test_weather.py index b3237ca2958..d97a5d3da3c 100644 --- a/tests/components/accuweather/test_weather.py +++ b/tests/components/accuweather/test_weather.py @@ -7,34 +7,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.accuweather.const import ( - ATTRIBUTION, - UPDATE_INTERVAL_DAILY_FORECAST, -) +from homeassistant.components.accuweather.const import UPDATE_INTERVAL_DAILY_FORECAST from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, - ATTR_WEATHER_APPARENT_TEMPERATURE, - ATTR_WEATHER_CLOUD_COVERAGE, - ATTR_WEATHER_DEW_POINT, - ATTR_WEATHER_HUMIDITY, - ATTR_WEATHER_PRESSURE, - ATTR_WEATHER_TEMPERATURE, - ATTR_WEATHER_UV_INDEX, - ATTR_WEATHER_VISIBILITY, - ATTR_WEATHER_WIND_BEARING, - ATTR_WEATHER_WIND_GUST_SPEED, - ATTR_WEATHER_WIND_SPEED, DOMAIN as WEATHER_DOMAIN, LEGACY_SERVICE_GET_FORECAST, SERVICE_GET_FORECASTS, - WeatherEntityFeature, -) -from homeassistant.const import ( - ATTR_ATTRIBUTION, - ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, - STATE_UNAVAILABLE, ) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -46,37 +26,18 @@ from tests.common import ( async_fire_time_changed, load_json_array_fixture, load_json_object_fixture, + snapshot_platform, ) from tests.typing import WebSocketGenerator -async def test_weather(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_weather( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of 
the weather without forecast.""" - await init_integration(hass) - - state = hass.states.get("weather.home") - assert state - assert state.state == "sunny" - assert state.attributes.get(ATTR_WEATHER_HUMIDITY) == 67 - assert state.attributes.get(ATTR_WEATHER_PRESSURE) == 1012.0 - assert state.attributes.get(ATTR_WEATHER_TEMPERATURE) == 22.6 - assert state.attributes.get(ATTR_WEATHER_VISIBILITY) == 16.1 - assert state.attributes.get(ATTR_WEATHER_WIND_BEARING) == 180 - assert state.attributes.get(ATTR_WEATHER_WIND_SPEED) == 14.5 # 4.03 m/s -> km/h - assert state.attributes.get(ATTR_WEATHER_APPARENT_TEMPERATURE) == 22.8 - assert state.attributes.get(ATTR_WEATHER_DEW_POINT) == 16.2 - assert state.attributes.get(ATTR_WEATHER_CLOUD_COVERAGE) == 10 - assert state.attributes.get(ATTR_WEATHER_WIND_GUST_SPEED) == 20.3 - assert state.attributes.get(ATTR_WEATHER_UV_INDEX) == 6 - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert ( - state.attributes.get(ATTR_SUPPORTED_FEATURES) - is WeatherEntityFeature.FORECAST_DAILY - ) - - entry = entity_registry.async_get("weather.home") - assert entry - assert entry.unique_id == "0123456" + with patch("homeassistant.components.accuweather.PLATFORMS", [Platform.WEATHER]): + entry = await init_integration(hass) + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index 1010a45b8fb..db4400f85d3 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -44,7 +44,7 @@ def options_fixture(hass): } -@pytest.fixture(name="data", scope="session") +@pytest.fixture(name="data", scope="package") def data_fixture(): """Define a fixture for response data.""" return json.loads(load_fixture("response.json", "airnow")) diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index 
719b25b3cdf..c90eb432c25 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -56,7 +56,7 @@ def disconnect_fixture(): return AsyncMock() -@pytest.fixture(name="data", scope="session") +@pytest.fixture(name="data", scope="package") def data_fixture(): """Define an update coordinator data example.""" return json.loads(load_fixture("data.json", "airvisual_pro")) diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 0cc4d995efa..9fdcc1c89c1 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -158,14 +158,14 @@ async def assert_power_controller_works( _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOn", endpoint, on_service, hass ) - for property in response["context"]["properties"]: - assert property["timeOfSample"] == timestamp + for context_property in response["context"]["properties"]: + assert context_property["timeOfSample"] == timestamp _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOff", endpoint, off_service, hass ) - for property in response["context"]["properties"]: - assert property["timeOfSample"] == timestamp + for context_property in response["context"]["properties"]: + assert context_property["timeOfSample"] == timestamp async def assert_scene_controller_works( diff --git a/tests/components/ambient_network/snapshots/test_sensor.ambr b/tests/components/ambient_network/snapshots/test_sensor.ambr index 377018c54be..fadb15ad015 100644 --- a/tests/components/ambient_network/snapshots/test_sensor.ambr +++ b/tests/components/ambient_network/snapshots/test_sensor.ambr @@ -10,7 +10,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_absolute_pressure', @@ -22,6 +22,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -38,7 +41,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_absolute_pressure-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'pressure', + 'friendly_name': 'Station A Absolute pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_absolute_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '977.616536580043', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_daily_rain-entry] EntityRegistryEntrySnapshot({ @@ -332,7 +349,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_irradiance', @@ -344,6 +361,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -357,7 +377,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_irradiance-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'irradiance', + 'friendly_name': 'Station A Irradiance', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_irradiance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.64', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_last_rain-entry] EntityRegistryEntrySnapshot({ @@ -368,7 +402,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_last_rain', @@ -393,7 +427,19 @@ }) # --- # name: 
test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_last_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'timestamp', + 'friendly_name': 'Station A Last rain', + }), + 'context': , + 'entity_id': 'sensor.station_a_last_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-10-30T09:45:00+00:00', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_max_daily_gust-entry] EntityRegistryEntrySnapshot({ @@ -464,7 +510,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_monthly_rain', @@ -476,6 +522,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -492,7 +541,21 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_monthly_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'precipitation', + 'friendly_name': 'Station A Monthly rain', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_monthly_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_relative_pressure-entry] EntityRegistryEntrySnapshot({ @@ -672,7 +735,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_weekly_rain', @@ -684,6 +747,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), 'sensor.private': dict({ 'suggested_unit_of_measurement': , }), @@ -700,7 +766,21 @@ }) # --- # name: 
test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_weekly_rain-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'device_class': 'precipitation', + 'friendly_name': 'Station A Weekly rain', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.station_a_weekly_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_direction-entry] EntityRegistryEntrySnapshot({ @@ -711,7 +791,7 @@ 'config_entry_id': , 'device_class': None, 'device_id': , - 'disabled_by': , + 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, 'entity_id': 'sensor.station_a_wind_direction', @@ -723,6 +803,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), }), 'original_device_class': None, 'original_icon': None, @@ -736,7 +819,19 @@ }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_direction-state] - None + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by ambientnetwork.net', + 'friendly_name': 'Station A Wind direction', + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.station_a_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) # --- # name: test_sensors[AA:AA:AA:AA:AA:AA][sensor.station_a_wind_gust-entry] EntityRegistryEntrySnapshot({ diff --git a/tests/components/ambient_network/test_sensor.py b/tests/components/ambient_network/test_sensor.py index b556c0c9c7c..35aa90ffe05 100644 --- a/tests/components/ambient_network/test_sensor.py +++ b/tests/components/ambient_network/test_sensor.py @@ -14,11 +14,12 @@ from homeassistant.helpers import entity_registry as er from .conftest import setup_platform -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, 
snapshot_platform @freeze_time("2023-11-08") @pytest.mark.parametrize("config_entry", ["AA:AA:AA:AA:AA:AA"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensors( hass: HomeAssistant, open_api: OpenAPI, @@ -30,16 +31,7 @@ async def test_sensors( """Test all sensors under normal operation.""" await setup_platform(True, hass, config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) @freeze_time("2023-11-09") diff --git a/tests/components/analytics_insights/test_sensor.py b/tests/components/analytics_insights/test_sensor.py index e0850bbd55b..3ede971c8f8 100644 --- a/tests/components/analytics_insights/test_sensor.py +++ b/tests/components/analytics_insights/test_sensor.py @@ -16,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform async def test_all_entities( @@ -32,17 +32,10 @@ async def test_all_entities( [Platform.SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - async def test_connection_error( hass: HomeAssistant, diff --git a/tests/components/aosmith/snapshots/test_sensor.ambr b/tests/components/aosmith/snapshots/test_sensor.ambr index 150e0c2934f..7aae9713037 100644 --- a/tests/components/aosmith/snapshots/test_sensor.ambr +++ b/tests/components/aosmith/snapshots/test_sensor.ambr @@ -1,5 +1,43 @@ # serializer version: 1 -# name: test_state[sensor.my_water_heater_energy_usage] +# name: test_state[sensor.my_water_heater_energy_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_water_heater_energy_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy usage', + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_usage', + 'unique_id': 
'energy_usage_junctionId', + 'unit_of_measurement': , + }) +# --- +# name: test_state[sensor.my_water_heater_energy_usage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -15,7 +53,46 @@ 'state': '132.825', }) # --- -# name: test_state[sensor.my_water_heater_hot_water_availability] +# name: test_state[sensor.my_water_heater_hot_water_availability-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'low', + 'medium', + 'high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_water_heater_hot_water_availability', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hot water availability', + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hot_water_availability', + 'unique_id': 'hot_water_availability_junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[sensor.my_water_heater_hot_water_availability-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'enum', diff --git a/tests/components/aosmith/snapshots/test_water_heater.ambr b/tests/components/aosmith/snapshots/test_water_heater.ambr index c3740341c17..deb079570f1 100644 --- a/tests/components/aosmith/snapshots/test_water_heater.ambr +++ b/tests/components/aosmith/snapshots/test_water_heater.ambr @@ -1,5 +1,103 @@ # serializer version: 1 -# name: test_state +# name: test_state[False][water_heater.my_water_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 130, + 'min_temp': 95, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.my_water_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[False][water_heater.my_water_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'off', + 'current_temperature': None, + 'friendly_name': 'My water heater', + 'max_temp': 130, + 'min_temp': 95, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 130, + }), + 'context': , + 'entity_id': 'water_heater.my_water_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'electric', + }) +# --- +# name: test_state[True][water_heater.my_water_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 130, + 'min_temp': 95, + 'operation_list': list([ + 'electric', + 'eco', + 'heat_pump', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.my_water_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'aosmith', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'junctionId', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[True][water_heater.my_water_heater-state] StateSnapshot({ 'attributes': 
ReadOnlyDict({ 'away_mode': 'off', @@ -26,24 +124,3 @@ 'state': 'heat_pump', }) # --- -# name: test_state_non_heat_pump[False] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'away_mode': 'off', - 'current_temperature': None, - 'friendly_name': 'My water heater', - 'max_temp': 130, - 'min_temp': 95, - 'supported_features': , - 'target_temp_high': None, - 'target_temp_low': None, - 'temperature': 130, - }), - 'context': , - 'entity_id': 'water_heater.my_water_heater', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'electric', - }) -# --- diff --git a/tests/components/aosmith/test_sensor.py b/tests/components/aosmith/test_sensor.py index f94dfdb710c..d6acd8865d8 100644 --- a/tests/components/aosmith/test_sensor.py +++ b/tests/components/aosmith/test_sensor.py @@ -1,50 +1,30 @@ """Tests for the sensor platform of the A. O. Smith integration.""" +from collections.abc import AsyncGenerator +from unittest.mock import patch + import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -@pytest.mark.parametrize( - ("entity_id", "unique_id"), - [ - ( - "sensor.my_water_heater_hot_water_availability", - "hot_water_availability_junctionId", - ), - ("sensor.my_water_heater_energy_usage", "energy_usage_junctionId"), - ], -) -async def test_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - entity_id: str, - unique_id: str, -) -> None: - """Test the setup of the sensor entities.""" - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == unique_id +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[list[str], None]: + """Return the platforms to be loaded for this test.""" + with 
patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SENSOR]): + yield -@pytest.mark.parametrize( - ("entity_id"), - [ - "sensor.my_water_heater_hot_water_availability", - "sensor.my_water_heater_energy_usage", - ], -) async def test_state( hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion, - entity_id: str, + entity_registry: er.EntityRegistry, ) -> None: """Test the state of the sensor entities.""" - state = hass.states.get(entity_id) - assert state == snapshot + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) diff --git a/tests/components/aosmith/test_water_heater.py b/tests/components/aosmith/test_water_heater.py index a256f720c0a..567121ac0b0 100644 --- a/tests/components/aosmith/test_water_heater.py +++ b/tests/components/aosmith/test_water_heater.py @@ -1,6 +1,7 @@ """Tests for the water heater platform of the A. O. Smith integration.""" -from unittest.mock import MagicMock +from collections.abc import AsyncGenerator +from unittest.mock import MagicMock, patch from py_aosmith.models import OperationMode import pytest @@ -19,53 +20,33 @@ from homeassistant.components.water_heater import ( STATE_HEAT_PUMP, WaterHeaterEntityFeature, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_FRIENDLY_NAME, - ATTR_SUPPORTED_FEATURES, -) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform -async def test_setup( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, -) -> None: - """Test the setup of the water heater entity.""" - entry = entity_registry.async_get("water_heater.my_water_heater") - assert entry - assert entry.unique_id == "junctionId" - - 
state = hass.states.get("water_heater.my_water_heater") - assert state - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "My water heater" - - -async def test_state( - hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: - """Test the state of the water heater entity.""" - state = hass.states.get("water_heater.my_water_heater") - assert state == snapshot +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[list[str], None]: + """Return the platforms to be loaded for this test.""" + with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.WATER_HEATER]): + yield @pytest.mark.parametrize( ("get_devices_fixture_heat_pump"), - [ - False, - ], + [False, True], ) -async def test_state_non_heat_pump( - hass: HomeAssistant, init_integration: MockConfigEntry, snapshot: SnapshotAssertion +async def test_state( + hass: HomeAssistant, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, ) -> None: - """Test the state of the water heater entity for a non heat pump device.""" - state = hass.states.get("water_heater.my_water_heater") - assert state == snapshot + """Test the state of the water heater entities.""" + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) @pytest.mark.parametrize( diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index 4dc9434bd65..a6b32d56e4c 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -73,3 +73,11 @@ VALID_ARANET2_DATA_SERVICE_INFO = fake_service_info( 1794: b"\x01!\x04\x04\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x0c\x02\x00O\x00<\x00\x01\x00\x80" }, ) + +VALID_ARANET_RADIATION_DATA_SERVICE_INFO = fake_service_info( + "Aranet\u2622 12345", + "0000fce0-0000-1000-8000-00805f9b34fb", + { + 1794: b"\x02!&\x04\x01\x00`-\x00\x00\x08\x98\x05\x00n\x00\x00d\x00,\x01\xfd\x00\xc7" + }, +) diff --git 
a/tests/components/aranet/test_sensor.py b/tests/components/aranet/test_sensor.py index 20aea65989d..0d57f00fdf4 100644 --- a/tests/components/aranet/test_sensor.py +++ b/tests/components/aranet/test_sensor.py @@ -8,6 +8,7 @@ from homeassistant.core import HomeAssistant from . import ( DISABLED_INTEGRATIONS_SERVICE_INFO, VALID_ARANET2_DATA_SERVICE_INFO, + VALID_ARANET_RADIATION_DATA_SERVICE_INFO, VALID_DATA_SERVICE_INFO, ) @@ -15,6 +16,65 @@ from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info +async def test_sensors_aranet_radiation( + hass: HomeAssistant, entity_registry_enabled_by_default: None +) -> None: + """Test setting up creates the sensors for Aranet Radiation device.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:ee:ff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 0 + inject_bluetooth_service_info(hass, VALID_ARANET_RADIATION_DATA_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all("sensor")) == 4 + + batt_sensor = hass.states.get("sensor.aranet_12345_battery") + batt_sensor_attrs = batt_sensor.attributes + assert batt_sensor.state == "100" + assert batt_sensor_attrs[ATTR_FRIENDLY_NAME] == "Aranet\u2622 12345 Battery" + assert batt_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert batt_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humid_sensor = hass.states.get("sensor.aranet_12345_radiation_total_dose") + humid_sensor_attrs = humid_sensor.attributes + assert humid_sensor.state == "0.011616" + assert ( + humid_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Radiation Total Dose" + ) + assert humid_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "mSv" + assert humid_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + temp_sensor = hass.states.get("sensor.aranet_12345_radiation_dose_rate") + 
temp_sensor_attrs = temp_sensor.attributes + assert temp_sensor.state == "0.11" + assert ( + temp_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Radiation Dose Rate" + ) + assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "μSv/h" + assert temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + interval_sensor = hass.states.get("sensor.aranet_12345_update_interval") + interval_sensor_attrs = interval_sensor.attributes + assert interval_sensor.state == "300" + assert ( + interval_sensor_attrs[ATTR_FRIENDLY_NAME] + == "Aranet\u2622 12345 Update Interval" + ) + assert interval_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "s" + assert interval_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + async def test_sensors_aranet2( hass: HomeAssistant, entity_registry_enabled_by_default: None ) -> None: diff --git a/tests/components/awair/conftest.py b/tests/components/awair/conftest.py index ec15561cc05..91c3d31e35b 100644 --- a/tests/components/awair/conftest.py +++ b/tests/components/awair/conftest.py @@ -7,67 +7,67 @@ import pytest from tests.common import load_fixture -@pytest.fixture(name="cloud_devices", scope="session") +@pytest.fixture(name="cloud_devices", scope="package") def cloud_devices_fixture(): """Fixture representing devices returned by Awair Cloud API.""" return json.loads(load_fixture("awair/cloud_devices.json")) -@pytest.fixture(name="local_devices", scope="session") +@pytest.fixture(name="local_devices", scope="package") def local_devices_fixture(): """Fixture representing devices returned by Awair local API.""" return json.loads(load_fixture("awair/local_devices.json")) -@pytest.fixture(name="gen1_data", scope="session") +@pytest.fixture(name="gen1_data", scope="package") def gen1_data_fixture(): """Fixture representing data returned from Gen1 Awair device.""" return json.loads(load_fixture("awair/awair.json")) 
-@pytest.fixture(name="gen2_data", scope="session") +@pytest.fixture(name="gen2_data", scope="package") def gen2_data_fixture(): """Fixture representing data returned from Gen2 Awair device.""" return json.loads(load_fixture("awair/awair-r2.json")) -@pytest.fixture(name="glow_data", scope="session") +@pytest.fixture(name="glow_data", scope="package") def glow_data_fixture(): """Fixture representing data returned from Awair glow device.""" return json.loads(load_fixture("awair/glow.json")) -@pytest.fixture(name="mint_data", scope="session") +@pytest.fixture(name="mint_data", scope="package") def mint_data_fixture(): """Fixture representing data returned from Awair mint device.""" return json.loads(load_fixture("awair/mint.json")) -@pytest.fixture(name="no_devices", scope="session") +@pytest.fixture(name="no_devices", scope="package") def no_devicess_fixture(): """Fixture representing when no devices are found in Awair's cloud API.""" return json.loads(load_fixture("awair/no_devices.json")) -@pytest.fixture(name="awair_offline", scope="session") +@pytest.fixture(name="awair_offline", scope="package") def awair_offline_fixture(): """Fixture representing when Awair devices are offline.""" return json.loads(load_fixture("awair/awair-offline.json")) -@pytest.fixture(name="omni_data", scope="session") +@pytest.fixture(name="omni_data", scope="package") def omni_data_fixture(): """Fixture representing data returned from Awair omni device.""" return json.loads(load_fixture("awair/omni.json")) -@pytest.fixture(name="user", scope="session") +@pytest.fixture(name="user", scope="package") def user_fixture(): """Fixture representing the User object returned from Awair's Cloud API.""" return json.loads(load_fixture("awair/user.json")) -@pytest.fixture(name="local_data", scope="session") +@pytest.fixture(name="local_data", scope="package") def local_data_fixture(): """Fixture representing data returned from Awair local device.""" return 
json.loads(load_fixture("awair/awair-local.json")) diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index b50a28df49f..7a4e446a0cc 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -114,6 +114,7 @@ def default_request_fixture( port_management_payload: dict[str, Any], param_properties_payload: dict[str, Any], param_ports_payload: dict[str, Any], + mqtt_status_code: int, ) -> Callable[[str], None]: """Mock default Vapix requests responses.""" @@ -131,7 +132,7 @@ def default_request_fixture( json=port_management_payload, ) respx.post("/axis-cgi/mqtt/client.cgi").respond( - json=MQTT_CLIENT_RESPONSE, + json=MQTT_CLIENT_RESPONSE, status_code=mqtt_status_code ) respx.post("/axis-cgi/streamprofile.cgi").respond( json=STREAM_PROFILES_RESPONSE, @@ -239,6 +240,12 @@ def param_ports_data_fixture() -> dict[str, Any]: return PORTS_RESPONSE +@pytest.fixture(name="mqtt_status_code") +def mqtt_status_code_fixture(): + """Property parameter data.""" + return 200 + + @pytest.fixture(name="setup_default_vapix_requests") def default_vapix_requests_fixture(mock_vapix_requests: Callable[[str], None]) -> None: """Mock default Vapix requests responses.""" diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index 1ae6db05427..5948874f0bf 100644 --- a/tests/components/axis/test_hub.py +++ b/tests/components/axis/test_hub.py @@ -2,7 +2,7 @@ from ipaddress import ip_address from unittest import mock -from unittest.mock import Mock, patch +from unittest.mock import Mock, call, patch import axis as axislib import pytest @@ -91,7 +91,8 @@ async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry ) -> None: """Successful setup.""" - mqtt_mock.async_subscribe.assert_called_with(f"axis/{MAC}/#", mock.ANY, 0, "utf-8") + mqtt_call = call(f"axis/{MAC}/#", mock.ANY, 0, "utf-8") + assert mqtt_call in mqtt_mock.async_subscribe.call_args_list topic = 
f"axis/{MAC}/event/tns:onvif/Device/tns:axis/Sensor/PIR/$source/sensor/0" message = ( @@ -109,6 +110,16 @@ async def test_device_support_mqtt( assert pir.name == f"{NAME} PIR 0" +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) +@pytest.mark.parametrize("mqtt_status_code", [401]) +async def test_device_support_mqtt_low_privilege( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_config_entry +) -> None: + """Successful setup.""" + mqtt_call = call(f"{MAC}/#", mock.ANY, 0, "utf-8") + assert mqtt_call not in mqtt_mock.async_subscribe.call_args_list + + async def test_update_address( hass: HomeAssistant, setup_config_entry, mock_vapix_requests ) -> None: diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index 76f3ff36d05..275ee08863e 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -13,7 +13,7 @@ from tests.common import load_fixture from tests.test_util.aiohttp import AiohttpClientMocker -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def community_post(): """Topic JSON with a codeblock marked as auto syntax.""" return load_fixture("blueprint/community_post.json") diff --git a/tests/components/bluetooth/test_config_flow.py b/tests/components/bluetooth/test_config_flow.py index d044be76e6d..33474280ec4 100644 --- a/tests/components/bluetooth/test_config_flow.py +++ b/tests/components/bluetooth/test_config_flow.py @@ -99,9 +99,7 @@ async def test_async_step_user_linux_one_adapter( result["flow_id"], user_input={} ) assert result2["type"] is FlowResultType.CREATE_ENTRY - assert ( - result2["title"] == "ACME Bluetooth Adapter 5.0 (cc01:aa01) (00:00:00:00:00:01)" - ) + assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)" assert result2["data"] == {} assert len(mock_setup_entry.mock_calls) == 1 @@ -144,9 +142,7 @@ async def test_async_step_user_linux_two_adapters( result["flow_id"], 
user_input={CONF_ADAPTER: "hci1"} ) assert result2["type"] is FlowResultType.CREATE_ENTRY - assert ( - result2["title"] == "ACME Bluetooth Adapter 5.0 (cc01:aa01) (00:00:00:00:00:02)" - ) + assert result2["title"] == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:02)" assert result2["data"] == {} assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index 82fa0341966..8c26745d541 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3173,3 +3173,16 @@ async def test_haos_9_or_later( registry = async_get_issue_registry(hass) issue = registry.async_get_issue(DOMAIN, "haos_outdated") assert issue is None + + +async def test_title_updated_if_mac_address( + hass: HomeAssistant, mock_bleak_scanner_start: MagicMock, one_adapter: None +) -> None: + """Test the title is updated if it is the mac address.""" + entry = MockConfigEntry( + domain="bluetooth", title="00:00:00:00:00:01", unique_id="00:00:00:00:00:01" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.title == "ACME Bluetooth Adapter 5.0 (00:00:00:00:00:01)" diff --git a/tests/components/brother/snapshots/test_sensor.ambr b/tests/components/brother/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a27c5addd61 --- /dev/null +++ b/tests/components/brother/snapshots/test_sensor.ambr @@ -0,0 +1,1394 @@ +# serializer version: 1 +# name: test_sensors[sensor.hl_l2340dw_b_w_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_b_w_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'B/W pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bw_pages', + 'unique_id': '0123456789_bw_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_b_w_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW B/W pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_b_w_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '709', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_belt_unit_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_belt_unit_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Belt unit remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'belt_unit_remaining_life', + 'unique_id': '0123456789_belt_unit_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_belt_unit_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Belt unit remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_belt_unit_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '97', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_black_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_drum_page_counter', + 'unique_id': '0123456789_black_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'black_drum_remaining_life', + 'unique_id': '0123456789_black_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_drum_remaining_pages', + 'unique_id': '0123456789_black_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_black_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Black toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'black_toner_remaining', + 'unique_id': '0123456789_black_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_black_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Black toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_black_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_color_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_color_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Color pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'color_pages', + 'unique_id': '0123456789_color_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_color_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'HL-L2340DW Color pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_color_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '902', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_page_counter', + 'unique_id': '0123456789_cyan_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_remaining_life', + 'unique_id': '0123456789_cyan_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_drum_remaining_pages', + 'unique_id': '0123456789_cyan_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_cyan_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cyan toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cyan_toner_remaining', + 'unique_id': '0123456789_cyan_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_cyan_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Cyan toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_cyan_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'drum_page_counter', + 'unique_id': '0123456789_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '986', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drum_remaining_life', + 'unique_id': '0123456789_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drum_remaining_pages', + 'unique_id': '0123456789_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11014', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Duplex unit page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'duplex_unit_page_counter', + 'unique_id': '0123456789_duplex_unit_pages_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Duplex 
unit page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '538', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_fuser_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_fuser_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fuser remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fuser_remaining_life', + 'unique_id': '0123456789_fuser_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_fuser_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Fuser remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_fuser_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '97', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_last_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': '0123456789_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'HL-L2340DW Last restart', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-03-03T15:04:24+00:00', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_page_counter', + 'unique_id': '0123456789_magenta_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_remaining_life', + 'unique_id': '0123456789_magenta_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': 'magenta_drum_remaining_pages', + 'unique_id': '0123456789_magenta_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_magenta_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Magenta toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'magenta_toner_remaining', + 'unique_id': '0123456789_magenta_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_magenta_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Magenta toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_magenta_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Page counter', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'page_counter', + 'unique_id': '0123456789_page_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '986', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_pf_kit_1_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_pf_kit_1_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PF Kit 1 remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pf_kit_1_remaining_life', + 'unique_id': '0123456789_pf_kit_1_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_pf_kit_1_remaining_lifetime-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW PF Kit 1 remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_pf_kit_1_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '98', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '0123456789_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Status', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'waiting', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum page counter', 
+ 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_drum_page_counter', + 'unique_id': '0123456789_yellow_drum_counter', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum page counter', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1611', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_lifetime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_lifetime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum remaining lifetime', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_drum_remaining_life', + 'unique_id': '0123456789_yellow_drum_remaining_life', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_lifetime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum remaining lifetime', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_lifetime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: 
test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow drum remaining pages', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'yellow_drum_remaining_pages', + 'unique_id': '0123456789_yellow_drum_remaining_pages', + 'unit_of_measurement': 'p', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow drum remaining pages', + 'state_class': , + 'unit_of_measurement': 'p', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16389', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_toner_remaining-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hl_l2340dw_yellow_toner_remaining', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Yellow toner remaining', + 'platform': 'brother', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'yellow_toner_remaining', + 'unique_id': '0123456789_yellow_toner_remaining', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.hl_l2340dw_yellow_toner_remaining-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'HL-L2340DW Yellow toner remaining', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hl_l2340dw_yellow_toner_remaining', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/brother/test_sensor.py b/tests/components/brother/test_sensor.py index ff29f8cb368..069a5ddc152 100644 --- a/tests/components/brother/test_sensor.py +++ b/tests/components/brother/test_sensor.py @@ -1,389 +1,40 @@ """Test sensor of Brother integration.""" -from datetime import datetime, timedelta +from datetime import timedelta import json -from unittest.mock import Mock, patch +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from homeassistant.components.brother.const import DOMAIN -from homeassistant.components.brother.sensor import UNIT_PAGES -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_ENTITY_ID, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, - PERCENTAGE, - STATE_UNAVAILABLE, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util.dt import UTC, utcnow +from homeassistant.util.dt import utcnow from . 
import init_integration -from tests.common import async_fire_time_changed, load_fixture - -ATTR_REMAINING_PAGES = "remaining_pages" -ATTR_COUNTER = "counter" +from tests.common import async_fire_time_changed, load_fixture, snapshot_platform -async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: - """Test states of the sensors.""" - entry = await init_integration(hass, skip_setup=True) - - # Pre-create registry entries for disabled by default sensors - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "0123456789_uptime", - suggested_object_id="hl_l2340dw_last_restart", - disabled_by=None, - ) - test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=UTC) - with ( - patch("brother.Brother.initialize"), - patch("brother.datetime", now=Mock(return_value=test_time)), - patch( - "brother.Brother._get_data", - return_value=json.loads(load_fixture("printer_data.json", "brother")), - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get("sensor.hl_l2340dw_status") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.state == "waiting" - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.hl_l2340dw_status") - assert entry - assert entry.unique_id == "0123456789_status" - - state = hass.states.get("sensor.hl_l2340dw_black_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "75" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_black_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_cyan_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_cyan_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_magenta_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "8" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_magenta_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_yellow_toner_remaining") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "2" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_yellow_toner_remaining") - assert entry - assert entry.unique_id == "0123456789_yellow_toner_remaining" - - state = hass.states.get("sensor.hl_l2340dw_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "11014" - assert 
state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "986" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_black_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_black_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_black_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_black_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = 
entity_registry.async_get("sensor.hl_l2340dw_black_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_black_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_cyan_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_cyan_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_cyan_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get( - 
"sensor.hl_l2340dw_magenta_drum_remaining_lifetime" - ) - assert entry - assert entry.unique_id == "0123456789_magenta_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_magenta_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_magenta_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_magenta_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_magenta_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "92" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get( - "sensor.hl_l2340dw_yellow_drum_remaining_lifetime" - ) - assert entry - assert entry.unique_id == "0123456789_yellow_drum_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "16389" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = 
entity_registry.async_get("sensor.hl_l2340dw_yellow_drum_remaining_pages") - assert entry - assert entry.unique_id == "0123456789_yellow_drum_remaining_pages" - - state = hass.states.get("sensor.hl_l2340dw_yellow_drum_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "1611" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_yellow_drum_page_counter") - assert entry - assert entry.unique_id == "0123456789_yellow_drum_counter" - - state = hass.states.get("sensor.hl_l2340dw_fuser_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "97" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_fuser_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_fuser_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_belt_unit_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "97" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_belt_unit_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_belt_unit_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_pf_kit_1_remaining_lifetime") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "98" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = 
entity_registry.async_get("sensor.hl_l2340dw_pf_kit_1_remaining_lifetime") - assert entry - assert entry.unique_id == "0123456789_pf_kit_1_remaining_life" - - state = hass.states.get("sensor.hl_l2340dw_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "986" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_page_counter") - assert entry - assert entry.unique_id == "0123456789_page_counter" - - state = hass.states.get("sensor.hl_l2340dw_duplex_unit_page_counter") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "538" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_duplex_unit_page_counter") - assert entry - assert entry.unique_id == "0123456789_duplex_unit_pages_counter" - - state = hass.states.get("sensor.hl_l2340dw_b_w_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "709" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_b_w_pages") - assert entry - assert entry.unique_id == "0123456789_bw_counter" - - state = hass.states.get("sensor.hl_l2340dw_color_pages") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES - assert state.state == "902" - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - - entry = entity_registry.async_get("sensor.hl_l2340dw_color_pages") - assert entry - assert entry.unique_id == "0123456789_color_counter" - - state = 
hass.states.get("sensor.hl_l2340dw_last_restart") - assert state - assert state.attributes.get(ATTR_ICON) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP - assert state.state == "2019-09-24T12:14:56+00:00" - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.hl_l2340dw_last_restart") - assert entry - assert entry.unique_id == "0123456789_uptime" - - -async def test_disabled_by_default_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entity_registry_enabled_by_default: None, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: - """Test the disabled by default Brother sensors.""" - await init_integration(hass) + """Test states of the sensors.""" + hass.config.set_time_zone("UTC") + freezer.move_to("2024-04-20 12:00:00+00:00") - state = hass.states.get("sensor.hl_l2340dw_last_restart") - assert state is None + with patch("homeassistant.components.brother.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - entry = entity_registry.async_get("sensor.hl_l2340dw_last_restart") - assert entry - assert entry.unique_id == "0123456789_uptime" - assert entry.disabled - assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 5e15aa32b6f..bcddc32f107 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -24,6 +24,7 @@ from homeassistant.components.homeassistant.exposed_entities import ( ExposedEntities, async_expose_entity, ) +from homeassistant.components.http.const import StrictConnectionMode from 
homeassistant.const import CONTENT_TYPE_JSON, __version__ as HA_VERSION from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er @@ -387,6 +388,7 @@ async def test_cloud_connection_info(hass: HomeAssistant) -> None: "connected": False, "enabled": False, "instance_domain": None, + "strict_connection": StrictConnectionMode.DISABLED, }, "version": HA_VERSION, } diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 5ee9af88681..d9d2b5c6742 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -19,6 +19,7 @@ from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DEFAULT_EXPOSED_DOMAINS, DOMAIN from homeassistant.components.google_assistant.helpers import GoogleEntity from homeassistant.components.homeassistant import exposed_entities +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.components.websocket_api import ERR_INVALID_FORMAT from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er @@ -782,6 +783,7 @@ async def test_websocket_status( "google_report_state": True, "remote_allow_remote_enable": True, "remote_enabled": False, + "strict_connection": "disabled", "tts_default_voice": ["en-US", "JennyNeural"], }, "alexa_entities": { @@ -901,6 +903,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.alexa_enabled assert cloud.client.prefs.google_secure_devices_pin is None assert cloud.client.prefs.remote_allow_remote_enable is True + assert cloud.client.prefs.strict_connection is StrictConnectionMode.DISABLED client = await hass_ws_client(hass) @@ -912,6 +915,7 @@ async def test_websocket_update_preferences( "google_secure_devices_pin": "1234", "tts_default_voice": ["en-GB", "RyanNeural"], "remote_allow_remote_enable": False, + "strict_connection": 
StrictConnectionMode.DROP_CONNECTION, } ) response = await client.receive_json() @@ -922,6 +926,7 @@ async def test_websocket_update_preferences( assert cloud.client.prefs.google_secure_devices_pin == "1234" assert cloud.client.prefs.remote_allow_remote_enable is False assert cloud.client.prefs.tts_default_voice == ("en-GB", "RyanNeural") + assert cloud.client.prefs.strict_connection is StrictConnectionMode.DROP_CONNECTION @pytest.mark.parametrize( diff --git a/tests/components/cloud/test_init.py b/tests/components/cloud/test_init.py index 9cc1324ebc1..bc4526975da 100644 --- a/tests/components/cloud/test_init.py +++ b/tests/components/cloud/test_init.py @@ -3,6 +3,7 @@ from collections.abc import Callable, Coroutine from typing import Any from unittest.mock import MagicMock, patch +from urllib.parse import quote_plus from hass_nabucasa import Cloud import pytest @@ -13,11 +14,16 @@ from homeassistant.components.cloud import ( CloudNotConnected, async_get_or_create_cloudhook, ) -from homeassistant.components.cloud.const import DOMAIN, PREF_CLOUDHOOKS +from homeassistant.components.cloud.const import ( + DOMAIN, + PREF_CLOUDHOOKS, + PREF_STRICT_CONNECTION, +) from homeassistant.components.cloud.prefs import STORAGE_KEY +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, HomeAssistant -from homeassistant.exceptions import Unauthorized +from homeassistant.exceptions import ServiceValidationError, Unauthorized from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockUser @@ -295,3 +301,77 @@ async def test_cloud_logout( await hass.async_block_till_done() assert cloud.is_logged_in is False + + +async def test_service_create_temporary_strict_connection_url_strict_connection_disabled( + hass: HomeAssistant, +) -> None: + """Test service create_temporary_strict_connection_url with strict_connection not enabled.""" + 
mock_config_entry = MockConfigEntry(domain=DOMAIN) + mock_config_entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + with pytest.raises( + ServiceValidationError, + match="Strict connection is not enabled for cloud requests", + ): + await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("mode"), + [ + StrictConnectionMode.DROP_CONNECTION, + StrictConnectionMode.GUARD_PAGE, + ], +) +async def test_service_create_temporary_strict_connection( + hass: HomeAssistant, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + mode: StrictConnectionMode, +) -> None: + """Test service create_temporary_strict_connection_url.""" + mock_config_entry = MockConfigEntry(domain=DOMAIN) + mock_config_entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: mode, + } + ) + + # No cloud url set + with pytest.raises(ServiceValidationError, match="No cloud URL available"): + await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + + # Patch cloud url + url = "https://example.com" + with patch( + "homeassistant.helpers.network._get_cloud_url", + return_value=url, + ): + response = await hass.services.async_call( + cloud.DOMAIN, + "create_temporary_strict_connection_url", + blocking=True, + return_response=True, + ) + assert isinstance(response, dict) + direct_url_prefix = f"{url}/auth/strict_connection/temp_token?authSig=" + assert response.pop("direct_url").startswith(direct_url_prefix) + assert response.pop("url").startswith( + f"https://login.home-assistant.io?u={quote_plus(direct_url_prefix)}" + ) + assert response == {} # No more keys in response diff --git 
a/tests/components/cloud/test_prefs.py b/tests/components/cloud/test_prefs.py index 9b0fa4c01d7..1ed2e1d524f 100644 --- a/tests/components/cloud/test_prefs.py +++ b/tests/components/cloud/test_prefs.py @@ -6,8 +6,13 @@ from unittest.mock import ANY, MagicMock, patch import pytest from homeassistant.auth.const import GROUP_ID_ADMIN -from homeassistant.components.cloud.const import DOMAIN, PREF_TTS_DEFAULT_VOICE +from homeassistant.components.cloud.const import ( + DOMAIN, + PREF_STRICT_CONNECTION, + PREF_TTS_DEFAULT_VOICE, +) from homeassistant.components.cloud.prefs import STORAGE_KEY, CloudPreferences +from homeassistant.components.http.const import StrictConnectionMode from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -174,3 +179,21 @@ async def test_tts_default_voice_legacy_gender( await hass.async_block_till_done() assert cloud.client.prefs.tts_default_voice == (expected_language, voice) + + +@pytest.mark.parametrize("mode", list(StrictConnectionMode)) +async def test_strict_connection_convertion( + hass: HomeAssistant, + cloud: MagicMock, + hass_storage: dict[str, Any], + mode: StrictConnectionMode, +) -> None: + """Test strict connection string value will be converted to the enum.""" + hass_storage[STORAGE_KEY] = { + "version": 1, + "data": {PREF_STRICT_CONNECTION: mode.value}, + } + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + + assert cloud.client.prefs.strict_connection is mode diff --git a/tests/components/cloud/test_strict_connection.py b/tests/components/cloud/test_strict_connection.py new file mode 100644 index 00000000000..f275bc4d2dd --- /dev/null +++ b/tests/components/cloud/test_strict_connection.py @@ -0,0 +1,294 @@ +"""Test strict connection mode for cloud.""" + +from collections.abc import Awaitable, Callable, Coroutine, Generator +from contextlib import contextmanager +from datetime import timedelta +from http import HTTPStatus +from 
typing import Any +from unittest.mock import MagicMock, Mock, patch + +from aiohttp import ServerDisconnectedError, web +from aiohttp.test_utils import TestClient +from aiohttp_session import get_session +import pytest +from yarl import URL + +from homeassistant.auth.models import RefreshToken +from homeassistant.auth.session import SESSION_ID, TEMP_TIMEOUT +from homeassistant.components.cloud.const import PREF_STRICT_CONNECTION +from homeassistant.components.http import KEY_HASS +from homeassistant.components.http.auth import ( + STRICT_CONNECTION_GUARD_PAGE, + async_setup_auth, + async_sign_path, +) +from homeassistant.components.http.const import KEY_AUTHENTICATED, StrictConnectionMode +from homeassistant.components.http.session import COOKIE_NAME, PREFIXED_COOKIE_NAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import is_cloud_connection +from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow + +from tests.common import async_fire_time_changed +from tests.typing import ClientSessionGenerator + + +@pytest.fixture +async def refresh_token(hass: HomeAssistant, hass_access_token: str) -> RefreshToken: + """Return a refresh token.""" + refresh_token = hass.auth.async_validate_access_token(hass_access_token) + assert refresh_token + session = hass.auth.session + assert session._strict_connection_sessions == {} + assert session._temp_sessions == {} + return refresh_token + + +@contextmanager +def simulate_cloud_request() -> Generator[None, None, None]: + """Simulate a cloud request.""" + with patch( + "hass_nabucasa.remote.is_cloud_request", Mock(get=Mock(return_value=True)) + ): + yield + + +@pytest.fixture +def app_strict_connection( + hass: HomeAssistant, refresh_token: RefreshToken +) -> web.Application: + """Fixture to set up a web.Application.""" + + async def handler(request): + """Return if request was authenticated.""" + return web.json_response(data={"authenticated": 
request[KEY_AUTHENTICATED]}) + + app = web.Application() + app[KEY_HASS] = hass + app.router.add_get("/", handler) + + async def set_cookie(request: web.Request) -> web.Response: + hass = request.app[KEY_HASS] + # Clear all sessions + hass.auth.session._temp_sessions.clear() + hass.auth.session._strict_connection_sessions.clear() + + if request.query["token"] == "refresh": + await hass.auth.session.async_create_session(request, refresh_token) + else: + await hass.auth.session.async_create_temp_unauthorized_session(request) + session = await get_session(request) + return web.Response(text=session[SESSION_ID]) + + app.router.add_get("/test/cookie", set_cookie) + return app + + +@pytest.fixture(name="client") +async def set_up_fixture( + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + app_strict_connection: web.Application, + cloud: MagicMock, + socket_enabled: None, +) -> TestClient: + """Set up the fixture.""" + + await async_setup_auth(hass, app_strict_connection, StrictConnectionMode.DISABLED) + assert await async_setup_component(hass, "cloud", {"cloud": {}}) + await hass.async_block_till_done() + return await aiohttp_client(app_strict_connection) + + +@pytest.mark.parametrize( + "strict_connection_mode", [e.value for e in StrictConnectionMode] +) +async def test_strict_connection_cloud_authenticated_requests( + hass: HomeAssistant, + client: TestClient, + hass_access_token: str, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + refresh_token: RefreshToken, + strict_connection_mode: StrictConnectionMode, +) -> None: + """Test authenticated requests with strict connection.""" + assert hass.auth.session._strict_connection_sessions == {} + + signed_path = async_sign_path( + hass, "/", timedelta(seconds=5), refresh_token_id=refresh_token.id + ) + + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: strict_connection_mode, + } + ) + + with simulate_cloud_request(): + assert is_cloud_connection(hass) + req = await client.get( 
+ "/", headers={"Authorization": f"Bearer {hass_access_token}"} + ) + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": True} + req = await client.get(signed_path) + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": True} + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + [HomeAssistant, TestClient], Awaitable[None] + ], + _: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled.""" + with simulate_cloud_request(): + assert is_cloud_connection(hass) + await perform_unauthenticated_request(hass, client) + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests_refresh_token( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + [HomeAssistant, TestClient], Awaitable[None] + ], + refresh_token: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled and refresh token cookie.""" + session = hass.auth.session + + # set strict connection cookie with refresh token + session_id = await _modify_cookie_for_cloud(client, "refresh") + assert session._strict_connection_sessions == {session_id: refresh_token.id} + with simulate_cloud_request(): + assert is_cloud_connection(hass) + req = await client.get("/") + assert req.status == HTTPStatus.OK + assert await req.json() == {"authenticated": False} + + # Invalidate refresh token, which should also invalidate session + hass.auth.async_remove_refresh_token(refresh_token) + assert session._strict_connection_sessions == {} + + await perform_unauthenticated_request(hass, client) + + +async def _test_strict_connection_cloud_enabled_external_unauthenticated_requests_temp_session( + hass: HomeAssistant, + client: TestClient, + perform_unauthenticated_request: Callable[ + 
[HomeAssistant, TestClient], Awaitable[None] + ], + _: RefreshToken, +) -> None: + """Test external unauthenticated requests with strict connection cloud enabled and temp cookie.""" + session = hass.auth.session + + # set strict connection cookie with temp session + assert session._temp_sessions == {} + session_id = await _modify_cookie_for_cloud(client, "temp") + assert session_id in session._temp_sessions + with simulate_cloud_request(): + assert is_cloud_connection(hass) + resp = await client.get("/") + assert resp.status == HTTPStatus.OK + assert await resp.json() == {"authenticated": False} + + async_fire_time_changed(hass, utcnow() + TEMP_TIMEOUT + timedelta(minutes=1)) + await hass.async_block_till_done(wait_background_tasks=True) + assert session._temp_sessions == {} + + await perform_unauthenticated_request(hass, client) + + +async def _drop_connection_unauthorized_request( + _: HomeAssistant, client: TestClient +) -> None: + with pytest.raises(ServerDisconnectedError): + # unauthorized requests should raise ServerDisconnectedError + await client.get("/") + + +async def _guard_page_unauthorized_request( + hass: HomeAssistant, client: TestClient +) -> None: + req = await client.get("/") + assert req.status == HTTPStatus.IM_A_TEAPOT + + def read_guard_page() -> str: + with open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: + return file.read() + + assert await req.text() == await hass.async_add_executor_job(read_guard_page) + + +@pytest.mark.parametrize( + "test_func", + [ + _test_strict_connection_cloud_enabled_external_unauthenticated_requests, + _test_strict_connection_cloud_enabled_external_unauthenticated_requests_refresh_token, + _test_strict_connection_cloud_enabled_external_unauthenticated_requests_temp_session, + ], + ids=[ + "no cookie", + "refresh token cookie", + "temp session cookie", + ], +) +@pytest.mark.parametrize( + ("strict_connection_mode", "request_func"), + [ + (StrictConnectionMode.DROP_CONNECTION, 
_drop_connection_unauthorized_request), + (StrictConnectionMode.GUARD_PAGE, _guard_page_unauthorized_request), + ], + ids=["drop connection", "static page"], +) +async def test_strict_connection_cloud_external_unauthenticated_requests( + hass: HomeAssistant, + client: TestClient, + refresh_token: RefreshToken, + set_cloud_prefs: Callable[[dict[str, Any]], Coroutine[Any, Any, None]], + test_func: Callable[ + [ + HomeAssistant, + TestClient, + Callable[[HomeAssistant, TestClient], Awaitable[None]], + RefreshToken, + ], + Awaitable[None], + ], + strict_connection_mode: StrictConnectionMode, + request_func: Callable[[HomeAssistant, TestClient], Awaitable[None]], +) -> None: + """Test external unauthenticated requests with strict connection cloud.""" + await set_cloud_prefs( + { + PREF_STRICT_CONNECTION: strict_connection_mode, + } + ) + + await test_func( + hass, + client, + request_func, + refresh_token, + ) + + +async def _modify_cookie_for_cloud(client: TestClient, token_type: str) -> str: + """Modify cookie for cloud.""" + # Cloud cookie has set secure=true and will not set on unsecure connection + # As we test with unsecure connection, we need to set it manually + # We get the session via http and modify the cookie name to the secure one + session_id = await (await client.get(f"/test/cookie?token={token_type}")).text() + cookie_jar = client.session.cookie_jar + localhost = URL("http://127.0.0.1") + cookie = cookie_jar.filter_cookies(localhost)[COOKIE_NAME].value + assert cookie + cookie_jar.clear() + cookie_jar.update_cookies({PREFIXED_COOKIE_NAME: cookie}, localhost) + return session_id diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 4526a9d9b67..3c3101d7a1f 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -328,23 +328,23 @@ async def test_websocket_get_action_capabilities( assert msg["success"] actions = msg["result"] - id = 2 
+ msg_id = 2 assert len(actions) == 3 for action in actions: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/action/capabilities", "action": action, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities[action["type"]] - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_action_capabilities_unknown_domain( @@ -487,23 +487,23 @@ async def test_websocket_get_condition_capabilities( assert msg["success"] conditions = msg["result"] - id = 2 + msg_id = 2 assert len(conditions) == 2 for condition in conditions: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/condition/capabilities", "condition": condition, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_condition_capabilities_unknown_domain( @@ -775,23 +775,23 @@ async def test_websocket_get_trigger_capabilities( assert msg["success"] triggers = msg["result"] - id = 2 + msg_id = 2 assert len(triggers) == 3 # toggled, turned_on, turned_off for trigger in triggers: await client.send_json( { - "id": id, + "id": msg_id, "type": "device_automation/trigger/capabilities", "trigger": trigger, } ) msg = await client.receive_json() - assert msg["id"] == id + assert msg["id"] == msg_id assert msg["type"] == TYPE_RESULT assert msg["success"] capabilities = msg["result"] assert capabilities == expected_capabilities - id = id + 1 + msg_id = msg_id + 1 async def test_websocket_get_trigger_capabilities_unknown_domain( diff --git a/tests/components/devolo_home_control/test_siren.py b/tests/components/devolo_home_control/test_siren.py index 
037d7b5021f..be662418967 100644 --- a/tests/components/devolo_home_control/test_siren.py +++ b/tests/components/devolo_home_control/test_siren.py @@ -66,7 +66,7 @@ async def test_siren_switching( with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as set: + ) as property_set: await hass.services.async_call( "siren", "turn_on", @@ -78,11 +78,11 @@ async def test_siren_switching( "Test", ("devolo.SirenMultiLevelSwitch:Test", 1) ) await hass.async_block_till_done() - set.assert_called_once_with(1) + property_set.assert_called_once_with(1) with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as set: + ) as property_set: await hass.services.async_call( "siren", "turn_off", @@ -95,7 +95,7 @@ async def test_siren_switching( ) await hass.async_block_till_done() assert hass.states.get(f"{DOMAIN}.test").state == STATE_OFF - set.assert_called_once_with(0) + property_set.assert_called_once_with(0) @pytest.mark.usefixtures("mock_zeroconf") @@ -119,7 +119,7 @@ async def test_siren_change_default_tone( with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" - ) as set: + ) as property_set: test_gateway.publisher.dispatch("Test", ("mss:Test", 2)) await hass.services.async_call( "siren", @@ -127,7 +127,7 @@ async def test_siren_change_default_tone( {"entity_id": f"{DOMAIN}.test"}, blocking=True, ) - set.assert_called_once_with(2) + property_set.assert_called_once_with(2) @pytest.mark.usefixtures("mock_zeroconf") diff --git a/tests/components/dwd_weather_warnings/test_config_flow.py b/tests/components/dwd_weather_warnings/test_config_flow.py index 3558ff5ed93..119c029767a 100644 --- a/tests/components/dwd_weather_warnings/test_config_flow.py +++ b/tests/components/dwd_weather_warnings/test_config_flow.py @@ -6,34 +6,31 @@ from unittest.mock import patch import pytest from 
homeassistant.components.dwd_weather_warnings.const import ( - ADVANCE_WARNING_SENSOR, + CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, - CONF_REGION_NAME, - CURRENT_WARNING_SENSOR, DOMAIN, ) from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, STATE_HOME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -DEMO_CONFIG_ENTRY: Final = { +DEMO_CONFIG_ENTRY_REGION: Final = { CONF_REGION_IDENTIFIER: "807111000", } -DEMO_YAML_CONFIGURATION: Final = { - CONF_NAME: "Unit Test", - CONF_REGION_NAME: "807111000", - CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], +DEMO_CONFIG_ENTRY_GPS: Final = { + CONF_REGION_DEVICE_TRACKER: "device_tracker.test_gps", } pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_create_entry(hass: HomeAssistant) -> None: - """Test that the full config flow works.""" +async def test_create_entry_region(hass: HomeAssistant) -> None: + """Test that the full config flow works for a region identifier.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) @@ -45,7 +42,7 @@ async def test_create_entry(hass: HomeAssistant) -> None: return_value=False, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) # Test for invalid region identifier. @@ -58,7 +55,7 @@ async def test_create_entry(hass: HomeAssistant) -> None: return_value=True, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) # Test for successfully created entry. 
@@ -70,12 +67,95 @@ async def test_create_entry(hass: HomeAssistant) -> None: } +async def test_create_entry_gps( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test that the full config flow works for a device tracker.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + + # Test for missing registry entry error. + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "entity_not_found"} + + # Test for missing device tracker error. + registry_entry = entity_registry.async_get_or_create( + "device_tracker", DOMAIN, "uuid", suggested_object_id="test_gps" + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "entity_not_found"} + + # Test for missing attribute error. + hass.states.async_set( + DEMO_CONFIG_ENTRY_GPS[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LONGITUDE: "7.610263"}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "attribute_not_found"} + + # Test for invalid provided identifier. 
+ hass.states.async_set( + DEMO_CONFIG_ENTRY_GPS[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LATITUDE: "50.180454", ATTR_LONGITUDE: "7.610263"}, + ) + + with patch( + "homeassistant.components.dwd_weather_warnings.config_flow.DwdWeatherWarningsAPI", + return_value=False, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "invalid_identifier"} + + # Test for successfully created entry. + with patch( + "homeassistant.components.dwd_weather_warnings.config_flow.DwdWeatherWarningsAPI", + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_GPS + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == "test_gps" + assert result["data"] == { + CONF_REGION_DEVICE_TRACKER: registry_entry.id, + } + + async def test_config_flow_already_configured(hass: HomeAssistant) -> None: """Test aborting, if the warncell ID / name is already configured during the config.""" entry = MockConfigEntry( domain=DOMAIN, - data=DEMO_CONFIG_ENTRY.copy(), - unique_id=DEMO_CONFIG_ENTRY[CONF_REGION_IDENTIFIER], + data=DEMO_CONFIG_ENTRY_REGION.copy(), + unique_id=DEMO_CONFIG_ENTRY_REGION[CONF_REGION_IDENTIFIER], ) entry.add_to_hass(hass) @@ -92,9 +172,40 @@ async def test_config_flow_already_configured(hass: HomeAssistant) -> None: return_value=True, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=DEMO_CONFIG_ENTRY + result["flow_id"], user_input=DEMO_CONFIG_ENTRY_REGION ) await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_config_flow_with_errors(hass: HomeAssistant) -> None: + """Test error scenarios during the 
configuration.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + + # Test error for empty input data. + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "no_identifier"} + + # Test error for setting both options during configuration. + demo_input = DEMO_CONFIG_ENTRY_REGION.copy() + demo_input.update(DEMO_CONFIG_ENTRY_GPS.copy()) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=demo_input, + ) + + await hass.async_block_till_done() + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "ambiguous_identifier"} diff --git a/tests/components/dwd_weather_warnings/test_init.py b/tests/components/dwd_weather_warnings/test_init.py index db7afaadec9..bfd03b2fdd4 100644 --- a/tests/components/dwd_weather_warnings/test_init.py +++ b/tests/components/dwd_weather_warnings/test_init.py @@ -4,26 +4,40 @@ from typing import Final from homeassistant.components.dwd_weather_warnings.const import ( ADVANCE_WARNING_SENSOR, + CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, CURRENT_WARNING_SENSOR, DOMAIN, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME +from homeassistant.const import ( + ATTR_LATITUDE, + ATTR_LONGITUDE, + CONF_MONITORED_CONDITIONS, + CONF_NAME, + STATE_HOME, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -DEMO_CONFIG_ENTRY: Final = { +DEMO_IDENTIFIER_CONFIG_ENTRY: Final = { CONF_NAME: "Unit Test", CONF_REGION_IDENTIFIER: "807111000", CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], 
} +DEMO_TRACKER_CONFIG_ENTRY: Final = { + CONF_NAME: "Unit Test", + CONF_REGION_DEVICE_TRACKER: "device_tracker.test_gps", + CONF_MONITORED_CONDITIONS: [CURRENT_WARNING_SENSOR, ADVANCE_WARNING_SENSOR], +} + async def test_load_unload_entry(hass: HomeAssistant) -> None: - """Test loading and unloading the integration.""" - entry = MockConfigEntry(domain=DOMAIN, data=DEMO_CONFIG_ENTRY) + """Test loading and unloading the integration with a region identifier based entry.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_IDENTIFIER_CONFIG_ENTRY) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -36,3 +50,68 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert entry.entry_id not in hass.data[DOMAIN] + + +async def test_load_invalid_registry_entry(hass: HomeAssistant) -> None: + """Test loading the integration with an invalid registry entry ID.""" + INVALID_DATA = DEMO_TRACKER_CONFIG_ENTRY.copy() + INVALID_DATA[CONF_REGION_DEVICE_TRACKER] = "invalid_registry_id" + entry = MockConfigEntry(domain=DOMAIN, data=INVALID_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_missing_device_tracker(hass: HomeAssistant) -> None: + """Test loading the integration with a missing device tracker.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_missing_required_attribute(hass: HomeAssistant) -> None: + """Test loading the integration with a device tracker missing a required attribute.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + 
entry.add_to_hass(hass) + + hass.states.async_set( + DEMO_TRACKER_CONFIG_ENTRY[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LONGITUDE: "7.610263"}, + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == ConfigEntryState.SETUP_RETRY + + +async def test_load_valid_device_tracker( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test loading the integration with a valid device tracker based entry.""" + entry = MockConfigEntry(domain=DOMAIN, data=DEMO_TRACKER_CONFIG_ENTRY) + entry.add_to_hass(hass) + entity_registry.async_get_or_create( + "device_tracker", + entry.domain, + "uuid", + suggested_object_id="test_gps", + config_entry=entry, + ) + + hass.states.async_set( + DEMO_TRACKER_CONFIG_ENTRY[CONF_REGION_DEVICE_TRACKER], + STATE_HOME, + {ATTR_LATITUDE: "50.180454", ATTR_LONGITUDE: "7.610263"}, + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state == ConfigEntryState.LOADED + assert entry.entry_id in hass.data[DOMAIN] diff --git a/tests/components/ecobee/common.py b/tests/components/ecobee/common.py index 60f17c3618d..423b0eee320 100644 --- a/tests/components/ecobee/common.py +++ b/tests/components/ecobee/common.py @@ -4,14 +4,19 @@ from unittest.mock import patch from homeassistant.components.ecobee.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def setup_platform(hass, platform) -> MockConfigEntry: +async def setup_platform( + hass: HomeAssistant, + platform: str, +) -> MockConfigEntry: """Set up the ecobee platform.""" mock_entry = MockConfigEntry( + title=DOMAIN, domain=DOMAIN, data={ CONF_API_KEY: "ABC123", @@ -22,7 +27,6 @@ async def setup_platform(hass, platform) -> MockConfigEntry: with 
patch("homeassistant.components.ecobee.const.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, {}) - - await hass.async_block_till_done() + await hass.async_block_till_done() return mock_entry diff --git a/tests/components/ecobee/conftest.py b/tests/components/ecobee/conftest.py index 952c2f3fba3..27d5a949c58 100644 --- a/tests/components/ecobee/conftest.py +++ b/tests/components/ecobee/conftest.py @@ -1,12 +1,13 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from homeassistant.components.ecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN -from tests.common import load_fixture +from tests.common import load_fixture, load_json_object_fixture @pytest.fixture(autouse=True) @@ -23,11 +24,15 @@ def requests_mock_fixture(requests_mock): @pytest.fixture -def mock_ecobee(): +def mock_ecobee() -> Generator[None, MagicMock]: """Mock an Ecobee object.""" ecobee = MagicMock() ecobee.request_pin.return_value = True ecobee.refresh_tokens.return_value = True + ecobee.thermostats = load_json_object_fixture("ecobee-data.json", "ecobee")[ + "thermostatList" + ] + ecobee.get_thermostat = lambda index: ecobee.thermostats[index] ecobee.config = {ECOBEE_API_KEY: "mocked_key", ECOBEE_REFRESH_TOKEN: "mocked_token"} with patch("homeassistant.components.ecobee.Ecobee", return_value=ecobee): diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 7ea9950e2d4..46ca77025cc 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -441,7 +441,7 @@ async def test_preset_indefinite_away(ecobee_fixture, thermostat) -> None: """Test indefinite away showing correctly, and not as temporary away.""" ecobee_fixture["program"]["currentClimateRef"] = "away" ecobee_fixture["events"][0]["holdClimateRef"] = "away" - assert thermostat.preset_mode == "Away" + assert thermostat.preset_mode == "away" 
ecobee_fixture["events"][0]["endDate"] = "2999-01-01" assert thermostat.preset_mode == PRESET_AWAY_INDEFINITELY diff --git a/tests/components/ecobee/test_notify.py b/tests/components/ecobee/test_notify.py new file mode 100644 index 00000000000..c66f04c752a --- /dev/null +++ b/tests/components/ecobee/test_notify.py @@ -0,0 +1,57 @@ +"""Test Ecobee notify service.""" + +from unittest.mock import MagicMock + +from homeassistant.components.ecobee import DOMAIN +from homeassistant.components.notify import ( + DOMAIN as NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from .common import setup_platform + +THERMOSTAT_ID = 0 + + +async def test_notify_entity_service( + hass: HomeAssistant, + mock_ecobee: MagicMock, +) -> None: + """Test the notify entity service.""" + await setup_platform(hass, NOTIFY_DOMAIN) + + entity_id = "notify.ecobee" + state = hass.states.get(entity_id) + assert state is not None + assert hass.services.has_service(NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE) + await hass.services.async_call( + NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + service_data={"entity_id": entity_id, "message": "It is too cold!"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + + +async def test_legacy_notify_service( + hass: HomeAssistant, + mock_ecobee: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service.""" + await setup_platform(hass, NOTIFY_DOMAIN) + + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + mock_ecobee.send_message.reset_mock() + assert len(issue_registry.issues) == 1 
diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py new file mode 100644 index 00000000000..19fdc6f7bba --- /dev/null +++ b/tests/components/ecobee/test_repairs.py @@ -0,0 +1,79 @@ +"""Test repairs for Ecobee integration.""" + +from http import HTTPStatus +from unittest.mock import MagicMock + +from homeassistant.components.ecobee import DOMAIN +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from .common import setup_platform + +from tests.typing import ClientSessionGenerator + +THERMOSTAT_ID = 0 + + +async def test_ecobee_repair_flow( + hass: HomeAssistant, + mock_ecobee: MagicMock, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the ecobee notify service repair flow is triggered.""" + await setup_platform(hass, NOTIFY_DOMAIN) + await async_process_repairs_platforms(hass) + + http_client = await hass_client() + + # Simulate legacy service being used + assert hass.services.has_service(NOTIFY_DOMAIN, DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + DOMAIN, + service_data={"message": "It is too cold!", "target": THERMOSTAT_ID}, + blocking=True, + ) + await hass.async_block_till_done() + mock_ecobee.send_message.assert_called_with(THERMOSTAT_ID, "It is too cold!") + mock_ecobee.send_message.reset_mock() + + # Assert the issue is present + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 1 + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": DOMAIN, "issue_id": "migrate_notify"} + ) + assert resp.status == 
HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await http_client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + # Test confirm step in repair flow + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index 816551f7e6a..d250a60a35f 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_buttons[5xu9h3][button.goat_g1_reset_blade_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.goat_g1_reset_blade_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset blade lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_blade', + 'unique_id': '8516fbb1-17f1-4194-0000000_reset_lifespan_blade', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_blade_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Reset blade lifespan', + }), + 'context': , + 'entity_id': 'button.goat_g1_reset_blade_lifespan', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_lens_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.goat_g1_reset_lens_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset lens brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_lifespan_lens_brush', + 'unique_id': '8516fbb1-17f1-4194-0000000_reset_lifespan_lens_brush', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[5xu9h3][button.goat_g1_reset_lens_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Reset lens brush lifespan', + }), + 'context': , + 'entity_id': 'button.goat_g1_reset_lens_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:00+00:00', + }) +# --- # name: test_buttons[yna5x1][button.ozmo_950_relocate:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index b35310158f2..e2cee3d410f 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,5 +1,583 @@ # serializer version: 1 -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_area_cleaned:entity-registry] +# name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000000_stats_area', + 'unit_of_measurement': 'm²', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Area cleaned', + 'unit_of_measurement': 'm²', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_battery:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '8516fbb1-17f1-4194-0000000_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_battery:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Goat G1 Battery', + 'unit_of_measurement': '%', 
+ }), + 'context': , + 'entity_id': 'sensor.goat_g1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_blade_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_blade_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Blade lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_blade', + 'unique_id': '8516fbb1-17f1-4194-0000000_lifespan_blade', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_blade_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Blade lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_blade_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cleaning duration', + 
'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000000_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_cleaning_duration:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Goat G1 Cleaning duration', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.0', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_error:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error', + 'unique_id': '8516fbb1-17f1-4194-0000000_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_error:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'description': 'NoError: Robot is operational', + 'friendly_name': 'Goat G1 Error', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_ip_address:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.goat_g1_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ip', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_ip', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_ip_address:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 IP address', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.0.10', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_lens_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_lens_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lens brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_lens_brush', + 'unique_id': '8516fbb1-17f1-4194-0000000_lifespan_lens_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_lens_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Lens brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_lens_brush_lifespan', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_area_cleaned', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total area cleaned', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_area', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area', + 'unit_of_measurement': 'm²', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Total area cleaned', + 'state_class': , + 'unit_of_measurement': 'm²', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_area_cleaned', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleaning_duration:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_cleaning_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total cleaning 
duration', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_time', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleaning_duration:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Goat G1 Total cleaning duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_cleaning_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.000', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleanings:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.goat_g1_total_cleanings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total cleanings', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_stats_cleanings', + 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_cleanings', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_total_cleanings:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Total cleanings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.goat_g1_total_cleanings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_rssi:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_wi_fi_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi RSSI', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_rssi', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_rssi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_rssi:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Wi-Fi RSSI', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_wi_fi_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-62', + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_ssid:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.goat_g1_wi_fi_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wi-Fi SSID', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'network_ssid', + 'unique_id': '8516fbb1-17f1-4194-0000000_network_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[5xu9h3][sensor.goat_g1_wi_fi_ssid:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Wi-Fi SSID', + }), + 'context': , + 'entity_id': 'sensor.goat_g1_wi_fi_ssid', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Testnetwork', + }) +# --- +# name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -32,7 +610,7 @@ 'unit_of_measurement': 'm²', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_area_cleaned:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Area cleaned', @@ -46,7 +624,7 @@ 'state': '10', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_battery:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_battery:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -79,7 +657,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_battery:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_battery:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', @@ -94,7 +672,7 @@ 'state': '100', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_cleaning_duration:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_cleaning_duration:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -130,7 +708,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_cleaning_duration:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_cleaning_duration:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -145,7 +723,7 @@ 'state': '5.0', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_error:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_error:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -178,7 +756,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_error:state] +# name: 
test_sensors[yna5x1][sensor.ozmo_950_error:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'description': 'NoError: Robot is operational', @@ -192,7 +770,7 @@ 'state': '0', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_filter_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_filter_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -225,7 +803,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_filter_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_filter_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Filter lifespan', @@ -239,7 +817,7 @@ 'state': '56', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_ip_address:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_ip_address:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -272,7 +850,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_ip_address:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_ip_address:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 IP address', @@ -285,7 +863,7 @@ 'state': '192.168.0.10', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_main_brush_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_main_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -318,7 +896,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_main_brush_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_main_brush_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Main brush lifespan', @@ -332,7 +910,7 @@ 'state': '80', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_side_brushes_lifespan:entity-registry] +# name: 
test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -365,7 +943,7 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_side_brushes_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Side brushes lifespan', @@ -379,7 +957,7 @@ 'state': '40', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_area_cleaned:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -414,7 +992,7 @@ 'unit_of_measurement': 'm²', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_area_cleaned:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Total area cleaned', @@ -429,7 +1007,7 @@ 'state': '60', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleaning_duration:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleaning_duration:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -467,7 +1045,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleaning_duration:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleaning_duration:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -483,7 +1061,7 @@ 'state': '40.000', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleanings:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleanings:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -518,7 +1096,7 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_total_cleanings:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_total_cleanings:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Total cleanings', @@ -532,7 +1110,7 @@ 'state': '123', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_rssi:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_rssi:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -565,7 +1143,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_rssi:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_rssi:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Wi-Fi RSSI', @@ -578,7 +1156,7 @@ 'state': '-62', }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_ssid:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_ssid:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -611,7 +1189,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensors[yna5x1-entity_ids0][sensor.ozmo_950_wi_fi_ssid:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_wi_fi_ssid:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Wi-Fi SSID', diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 8e583e6342b..277983eb0c5 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -48,8 +48,21 @@ def platforms() -> Platform | list[Platform]: ), ], ), + ( + "5xu9h3", + [ + ( + "button.goat_g1_reset_blade_lifespan", + ResetLifeSpan(LifeSpan.BLADE), + ), + ( + "button.goat_g1_reset_lens_brush_lifespan", + ResetLifeSpan(LifeSpan.LENS_BRUSH), + ), + ], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_buttons( hass: HomeAssistant, @@ -98,6 +111,13 @@ async def test_buttons( 
"button.ozmo_950_reset_side_brushes_lifespan", ], ), + ( + "5xu9h3", + [ + "button.goat_g1_reset_blade_lifespan", + "button.goat_g1_reset_lens_brush_lifespan", + ], + ), ], ) async def test_disabled_by_default_buttons( diff --git a/tests/components/ecovacs/test_event.py b/tests/components/ecovacs/test_event.py index 0e7adaad954..104a3bfc69e 100644 --- a/tests/components/ecovacs/test_event.py +++ b/tests/components/ecovacs/test_event.py @@ -76,7 +76,7 @@ async def test_last_job( await notify_and_wait( hass, event_bus, - ReportStatsEvent(0, 1, "spotArea", "3", CleanJobStatus.MANUAL_STOPPED, [1]), + ReportStatsEvent(0, 1, "spotArea", "3", CleanJobStatus.MANUALLY_STOPPED, [1]), ) assert (state := hass.states.get(state.entity_id)) diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index 7780b86d714..c27da2196b1 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -122,7 +122,7 @@ async def test_devices_in_dr( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 20), + ("5xu9h3", 24), ], ) async def test_all_entities_loaded( diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 7ff4ab3f009..5b8bf18e1d8 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -69,7 +69,25 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.ozmo_950_error", ], ), + ( + "5xu9h3", + [ + "sensor.goat_g1_area_cleaned", + "sensor.goat_g1_cleaning_duration", + "sensor.goat_g1_total_area_cleaned", + "sensor.goat_g1_total_cleaning_duration", + "sensor.goat_g1_total_cleanings", + "sensor.goat_g1_battery", + "sensor.goat_g1_ip_address", + "sensor.goat_g1_wi_fi_rssi", + "sensor.goat_g1_wi_fi_ssid", + "sensor.goat_g1_blade_lifespan", + "sensor.goat_g1_lens_brush_lifespan", + "sensor.goat_g1_error", + ], + ), ], + ids=["yna5x1", "5xu9h3"], ) async def test_sensors( hass: HomeAssistant, @@ 
-111,7 +129,17 @@ async def test_sensors( "sensor.ozmo_950_wi_fi_ssid", ], ), + ( + "5xu9h3", + [ + "sensor.goat_g1_error", + "sensor.goat_g1_ip_address", + "sensor.goat_g1_wi_fi_rssi", + "sensor.goat_g1_wi_fi_ssid", + ], + ), ], + ids=["yna5x1", "5xu9h3"], ) async def test_disabled_by_default_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 40d409aea8e..965af3b40fc 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock, Mock, patch +import jwt from pyenphase import ( Envoy, EnvoyData, @@ -342,7 +343,7 @@ def mock_envoy_fixture( @pytest.fixture(name="setup_enphase_envoy") -async def setup_enphase_envoy_fixture(hass, config, mock_envoy): +async def setup_enphase_envoy_fixture(hass: HomeAssistant, config, mock_envoy): """Define a fixture to set up Enphase Envoy.""" with ( patch( @@ -368,7 +369,10 @@ def mock_authenticate(): @pytest.fixture(name="mock_auth") def mock_auth(serial_number): """Define a mocked EnvoyAuth fixture.""" - return EnvoyTokenAuth("127.0.0.1", token="abc", envoy_serial=serial_number) + token = jwt.encode( + payload={"name": "envoy", "exp": 1907837780}, key="secret", algorithm="HS256" + ) + return EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial=serial_number) @pytest.fixture(name="mock_setup") diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 7af0cd584a4..2709087a543 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -1,6 +1,7 @@ """Test the Enphase Envoy config flow.""" from ipaddress import ip_address +import logging from unittest.mock import AsyncMock from pyenphase import EnvoyAuthenticationError, EnvoyError @@ -13,6 +14,10 @@ from 
homeassistant.components.enphase_envoy.const import DOMAIN, PLATFORMS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + +_LOGGER = logging.getLogger(__name__) + async def test_form(hass: HomeAssistant, config, setup_enphase_envoy) -> None: """Test we get the form.""" @@ -324,9 +329,13 @@ async def test_form_host_already_exists( async def test_zeroconf_serial_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry, + setup_enphase_envoy, + caplog: pytest.LogCaptureFixture, ) -> None: """Test serial number already exists from zeroconf.""" + _LOGGER.setLevel(logging.DEBUG) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, @@ -345,6 +354,7 @@ async def test_zeroconf_serial_already_exists( assert result["reason"] == "already_configured" assert config_entry.data["host"] == "4.4.4.4" + assert "Zeroconf ip 4 processing 4.4.4.4, current hosts: {'1.1.1.1'}" in caplog.text async def test_zeroconf_serial_already_exists_ignores_ipv6( @@ -397,6 +407,233 @@ async def test_zeroconf_host_already_exists( assert config_entry.title == "Envoy 1234" +async def test_zero_conf_while_form( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test zeroconf while form is active.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.0.1"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert 
result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + +async def test_zero_conf_second_envoy_while_form( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test zeroconf while form is active.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("4.4.4.4"), + ip_addresses=[ip_address("4.4.4.4")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "4321", "protovers": "7.0.1"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "host": "4.4.4.4", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 4321" + assert result3["result"].unique_id == "4321" + + result4 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result4["type"] is FlowResultType.ABORT + + +async def test_zero_conf_malformed_serial_property( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf properties.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + with pytest.raises(KeyError) as ex: + await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serilnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert "serialnum" in str(ex.value) + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.ABORT + + +async def test_zero_conf_malformed_serial( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf properties.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "12%4", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + "host": "1.1.1.1", + "username": "test-username", + "password": "test-password", + }, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Envoy 12%4" 
+ + +async def test_zero_conf_malformed_fw_property( + hass: HomeAssistant, config_entry, setup_enphase_envoy +) -> None: + """Test malformed zeroconf property.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protvers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.unique_id == "1234" + assert config_entry.title == "Envoy 1234" + + +async def test_zero_conf_old_blank_entry( + hass: HomeAssistant, setup_enphase_envoy +) -> None: + """Test re-using old blank entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + "host": "1.1.1.1", + "username": "", + "password": "", + "name": "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data["host"] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy 1234" + + async def 
test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> None: """Test we reauth auth.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/epic_games_store/__init__.py b/tests/components/epic_games_store/__init__.py new file mode 100644 index 00000000000..1c5baf3704f --- /dev/null +++ b/tests/components/epic_games_store/__init__.py @@ -0,0 +1 @@ +"""Tests for the Epic Games Store integration.""" diff --git a/tests/components/epic_games_store/common.py b/tests/components/epic_games_store/common.py new file mode 100644 index 00000000000..95191ad97f9 --- /dev/null +++ b/tests/components/epic_games_store/common.py @@ -0,0 +1,31 @@ +"""Common methods used across tests for Epic Games Store.""" + +from unittest.mock import patch + +from homeassistant.components.epic_games_store.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import MOCK_COUNTRY, MOCK_LANGUAGE + +from tests.common import MockConfigEntry + + +async def setup_platform(hass: HomeAssistant, platform: str) -> MockConfigEntry: + """Set up the Epic Games Store platform.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + unique_id=f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}", + ) + mock_entry.add_to_hass(hass) + + with patch("homeassistant.components.epic_games_store.PLATFORMS", [platform]): + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + return mock_entry diff --git a/tests/components/epic_games_store/conftest.py b/tests/components/epic_games_store/conftest.py new file mode 100644 index 00000000000..e02997a429e --- /dev/null +++ b/tests/components/epic_games_store/conftest.py @@ -0,0 +1,44 @@ +"""Define fixtures for Epic Games Store tests.""" + +from unittest.mock import Mock, patch + +import 
pytest + +from .const import ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND, + DATA_FREE_GAMES, + DATA_FREE_GAMES_CHRISTMAS_SPECIAL, +) + + +@pytest.fixture(name="service_multiple") +def mock_service_multiple(): + """Mock a successful service with multiple free & discount games.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_FREE_GAMES) + yield service_mock + + +@pytest.fixture(name="service_christmas_special") +def mock_service_christmas_special(): + """Mock a successful service with Christmas special case.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_FREE_GAMES_CHRISTMAS_SPECIAL) + yield service_mock + + +@pytest.fixture(name="service_attribute_not_found") +def mock_service_attribute_not_found(): + """Mock a successful service returning a not found attribute error with free & discount games.""" + with patch( + "homeassistant.components.epic_games_store.coordinator.EpicGamesStoreAPI" + ) as service_mock: + instance = service_mock.return_value + instance.get_free_games = Mock(return_value=DATA_ERROR_ATTRIBUTE_NOT_FOUND) + yield service_mock diff --git a/tests/components/epic_games_store/const.py b/tests/components/epic_games_store/const.py new file mode 100644 index 00000000000..dcd82c7e03e --- /dev/null +++ b/tests/components/epic_games_store/const.py @@ -0,0 +1,25 @@ +"""Test constants.""" + +from homeassistant.components.epic_games_store.const import DOMAIN + +from tests.common import load_json_object_fixture + +MOCK_LANGUAGE = "fr" +MOCK_COUNTRY = "FR" + +DATA_ERROR_ATTRIBUTE_NOT_FOUND = load_json_object_fixture( + "error_1004_attribute_not_found.json", DOMAIN +) + +DATA_ERROR_WRONG_COUNTRY = load_json_object_fixture( + "error_5222_wrong_country.json", DOMAIN +) 
+ +# free games +DATA_FREE_GAMES = load_json_object_fixture("free_games.json", DOMAIN) + +DATA_FREE_GAMES_ONE = load_json_object_fixture("free_games_one.json", DOMAIN) + +DATA_FREE_GAMES_CHRISTMAS_SPECIAL = load_json_object_fixture( + "free_games_christmas_special.json", DOMAIN +) diff --git a/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json b/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json new file mode 100644 index 00000000000..6cb14608c2b --- /dev/null +++ b/tests/components/epic_games_store/fixtures/error_1004_attribute_not_found.json @@ -0,0 +1,1026 @@ +{ + "errors": [ + { + "message": "CatalogOffer/offerMappings: Request failed with status code 404", + "locations": [ + { + "line": 73, + "column": 17 + } + ], + "correlationId": "0451aa13-b1d6-4f90-8ca5-d12bf917675a", + "serviceResponse": "{\"errorMessage\":\"The item or resource being requested could not be found.\",\"errorCode\":\"errors.com.epicgames.not_found\",\"numericErrorCode\":1004,\"errorStatus\":404}", + "stack": null, + "path": ["Catalog", "searchStore", "elements", 4, "offerMappings"] + }, + { + "message": "CatalogNamespace/mappings: Request failed with status code 404", + "locations": [ + { + "line": 68, + "column": 19 + } + ], + "correlationId": "0451aa13-b1d6-4f90-8ca5-d12bf917675a", + "serviceResponse": "{\"errorMessage\":\"The item or resource being requested could not be found.\",\"errorCode\":\"errors.com.epicgames.not_found\",\"numericErrorCode\":1004,\"errorStatus\":404}", + "stack": null, + "path": ["Catalog", "searchStore", "elements", 4, "catalogNs", "mappings"] + } + ], + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Godlike Burger", + "id": "d9300ace164b41ac90a7b54e59d47953", + "namespace": "beb7e64d3da74ae780405da48cccb581", + "description": "Dans Godlike Burger, vous g\u00e9rez le restaurant le plus d\u00e9ment de la galaxie\u00a0! Assommez, empoisonnez et tuez les clients... 
pour les transformer en steaks\u00a0! Mais nulle crainte\u00a0: la client\u00e8le alien reviendra si vous la jouez fine, car c'est trop bon de s'adonner au cannibalisme.", + "effectiveDate": "2022-04-21T17:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2022-03-28T18:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/godlike-burger-offer-1trpc.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/download-godlike-burger-offer-8u2uh.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/f42598038b9343e58d27e0a8c0b831b6/download-godlike-burger-offer-8u2uh.jpg" + } + ], + "seller": { + "id": "o-d2ygr9bjcjfebgt8842wvvbmswympz", + "name": "Daedalic Entertainment" + }, + "productSlug": null, + "urlSlug": "37b001690e2a4d6f872567cdd06f0c6f", + "url": null, + "items": [ + { + "id": "c027f1bc9db54f189ad938634500e542", + "namespace": "beb7e64d3da74ae780405da48cccb581" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1083" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1263" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "godlike-burger-4150a0", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "godlike-burger-4150a0", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 1999, + "voucherDiscount": 0, 
+ "discount": 1999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "19,99\u00a0\u20ac", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "1c2dc8194022428da305eedb42ed574d", + "endDate": "2023-10-12T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-05T15:00:00.000Z", + "endDate": "2023-10-12T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Destiny\u00a02\u00a0: Pack 30e anniversaire Bungie", + "id": "e7b9e222c7274dd28714aba2e06d2a01", + "namespace": "428115def4ca4deea9d69c99c5a5a99e", + "description": "Le Pack 30e anniversaire inclut un nouveau donjon, le lance-roquettes exotique Gjallarhorn, de nouvelles armes et armures, et plus encore. 
", + "effectiveDate": "2022-08-23T13:00:00.000Z", + "offerType": "DLC", + "expiryDate": null, + "viewableDate": "2022-08-08T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S4_1200x1600_1200x1600-04ebd49752c682d003014680f3d5be18" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S3_2560x1440_2560x1440-b2f882323923927c414ab23faf1022ca" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_OfferLogo_200x200_200x200-234225abe0aca2bfa7f5c5bc6e6fe348" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S4_1200x1600_1200x1600-04ebd49752c682d003014680f3d5be18" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/FR_Bungie_Bungie_30th_Anniversary_Pack_S3_2560x1440_2560x1440-b2f882323923927c414ab23faf1022ca" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot1_1920x1080-37c070caa0106b08910518150bf96e94" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot2_1920x1080-14490e3ec01dceedce23d870774b2393" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot3_1920x1080-fdf882ad2cc98be7e63516b4ad28d6e9" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot4_1920x1080-079d4e12a8a04b31f7d4def7f4b745e7" + }, + { + "type": "featuredMedia", + "url": 
"https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot5_1920x1080-f3c958c685629b6678544cba8bffc483" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot6_1920x1080-f13bb310baf9c158d15d473474c11586" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot7_1920x1080-6d2b714d2cfd64623cdcc39487d0b429" + }, + { + "type": "featuredMedia", + "url": "https://cdn1.epicgames.com/offer/428115def4ca4deea9d69c99c5a5a99e/Screenshot8_1920x1080-0956ff1a3a4969d9a3f2b96d87bdc19d" + } + ], + "seller": { + "id": "o-49lqsefbl6zr5sy3ztak77ej97cuvh", + "name": "Bungie" + }, + "productSlug": null, + "urlSlug": "destiny-2--bungie-30th-anniversary-pack", + "url": null, + "items": [ + { + "id": "904b57fb8bcd41a6be6c690a92ab3c15", + "namespace": "428115def4ca4deea9d69c99c5a5a99e" + } + ], + "customAttributes": [], + "categories": [ + { + "path": "addons" + }, + { + "path": "freegames" + }, + { + "path": "addons/durable" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1203" + }, + { + "id": "1210" + }, + { + "id": "1370" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "destiny-2", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "destiny-2--bungie-30th-anniversary-pack", + "pageType": "addon--cms-hybrid" + } + ], + "price": { + "totalPrice": { + "discountPrice": 2499, + "originalPrice": 2499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "24,99\u00a0\u20ac", + "discountPrice": "24,99\u00a0\u20ac", + "intermediatePrice": "24,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-11T16:00:00.000Z", + "endDate": 
"2023-10-25T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + } + ] + } + ] + } + }, + { + "title": "Gloomhaven", + "id": "9232fdbc352445cc820a54bdc97ed2bb", + "namespace": "bc079f73f020432fac896d30c8e2c330", + "description": "Que vous soyez arriv\u00e9s \u00e0 Gloomhaven en r\u00e9pondant \u00e0 l'appel de l'aventure ou au d\u00e9sir cupide de l'\u00e9clat de l'or, votre destin n'en sera pas chang\u00e9...", + "effectiveDate": "2022-09-22T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2022-09-22T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/gloomhaven-offer-1j9mc.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/download-gloomhaven-offer-1ho2x.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/ef2777467a3c49059a076e42fd9b41f0/download-gloomhaven-offer-1ho2x.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "0d48da287df14493a7415b560ec1bbb3", + "url": null, + "items": [ + { + "id": "6047532dd78a456593d0ffd6602a7218", + "namespace": "bc079f73f020432fac896d30c8e2c330" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetViewableDate", + "value": "true" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + }, + { + "key": "isBlockchainUsed", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "29088" + }, + { + "id": "21122" + }, + { + "id": "1188" + }, + { + "id": "21127" + }, + { + "id": "19847" + }, + { + 
"id": "21129" + }, + { + "id": "1386" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1264" + }, + { + "id": "21137" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "16979" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1367" + }, + { + "id": "22776" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21147" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "gloomhaven-92f741", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "gloomhaven-92f741", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 3499, + "originalPrice": 3499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "34,99\u00a0\u20ac", + "discountPrice": "34,99\u00a0\u20ac", + "intermediatePrice": "34,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "911 Operator", + "id": "268fd6ea355740d6ba4c76c3ffd4cbe0", + "namespace": "d923c737f0d243ccab407605ea40d39e", + "description": "911 OPERATOR est un jeu o\u00f9 tu deviens op\u00e9rateur de la ligne des urgences et o\u00f9 tu r\u00e9sous des incidents en fournissant des instruction et en g\u00e9rant des \u00e9quipes de secours. 
Tu peux jouer sur la carte de n\u2019importe quelle ville* du monde!", + "effectiveDate": "2023-09-14T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2023-09-07T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-omkv7.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-8dcp7.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/c06cc46c27954f55974e9e7a4f3b3849/911-operator-8dcp7.jpg" + } + ], + "seller": { + "id": "o-8dv8wz77w8tqnymmm8e99p28eny7kg", + "name": "Games Operators S.A." + }, + "productSlug": null, + "urlSlug": "ecb09cc5f55345e6bf6d3d9354c12876", + "url": null, + "items": [ + { + "id": "07499df5530b45c3ad8464a96cbe26c7", + "namespace": "d923c737f0d243ccab407605ea40d39e" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetViewableDate", + "value": "true" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + }, + { + "key": "isBlockchainUsed", + "value": "false" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + } + ], + "tags": [ + { + "id": "1393" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "1263" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "911-operator-585edd", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "911-operator-585edd", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1349, + "originalPrice": 1349, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 
2 + }, + "fmtPrice": { + "originalPrice": "13,49\u00a0\u20ac", + "discountPrice": "13,49\u00a0\u20ac", + "intermediatePrice": "13,49\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-23T14:00:00.000Z", + "endDate": "2023-10-30T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 50 + } + } + ] + } + ] + } + }, + { + "title": "Q.U.B.E. ULTIMATE BUNDLE", + "id": "f18f14a76a874aa883a651fcc8c513d0", + "namespace": "0712c5eca64b47bbbced82cabba9f0d7", + "description": "Q.U.B.E. ULTIMATE BUNDLE", + "effectiveDate": "2023-10-12T15:00:00.000Z", + "offerType": "BUNDLE", + "expiryDate": null, + "viewableDate": "2023-10-05T14:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Portrait_V2_1200x1600-981ac683de50fd5afed2c87dbc26494a" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Landscape_V2_2560x1440-50dbecaa32e134e246717f8a5e60ad25" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Logo_V2_400x400-99dcb7d141728efbe2b1b4e993ce6339" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/0712c5eca64b47bbbced82cabba9f0d7/EGSBundle_Portrait_V2_1200x1600-981ac683de50fd5afed2c87dbc26494a" + } + ], + "seller": { + "id": "o-kk34ewvmscclj5a2ukx49ff6qknn7a", + "name": "Ten Hut Games" + }, + "productSlug": "qube-ultimate-bundle", + "urlSlug": "qube-ultimate-bundle", + "url": null, + "items": [ + { + "id": "11d229f51ac1445a8925b8d14da82b9b", + "namespace": "ad43401ad02840c2b2bee5f1f1a59988" + }, + { + "id": "0e7ec1d579ab481c93dff6056c19299f", + "namespace": 
"4b5f1eb366dc45f0920d397c01b291ba" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "qube-ultimate-bundle" + } + ], + "categories": [ + { + "path": "bundles" + }, + { + "path": "freegames" + }, + { + "path": "bundles/games" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "1298" + }, + { + "id": "1203" + }, + { + "id": "1117" + }, + { + "id": "1294" + } + ], + "catalogNs": { + "mappings": null + }, + "offerMappings": null, + "price": { + "totalPrice": { + "discountPrice": 4499, + "originalPrice": 4499, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "44,99\u00a0\u20ac", + "discountPrice": "44,99\u00a0\u20ac", + "intermediatePrice": "44,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-12T15:00:00.000Z", + "endDate": "2023-10-19T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "PAYDAY 2", + "id": "de434b7be57940d98ede93b50cdacfc2", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "PAYDAY 2 is an action-packed, four-player co-op shooter that once again lets gamers don the masks of the original PAYDAY crew - Dallas, Hoxton, Wolf and Chains - as they descend on Washington DC for an epic crime spree.", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "viewableDate": "2023-06-01T14:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/mammoth-h1nvv_2560x1440-ac346d6ece5ec356561e112fbddb2dc1" + }, + { + "type": "VaultClosed", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/EN-mega-sale-vault-16x9-asset_1920x1080-a27cf3919dde320a72936374a1d47813" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "payday-2-c66369", + "urlSlug": "mystery-game-7", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/mega-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "payday-2-c66369" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Blazing Sails", + "id": "363d0be3b57d4741a046d38da0e6355e", + "namespace": "aee7dd76aa6746578f476dc47f8d1d7f", + "description": "Survivez \u00e0 Blazing Sails, un jeu de pirate en JcJ tr\u00e9pidant\u00a0! Cr\u00e9ez votre navire et vos pirates uniques. Naviguez en \u00e9quipe avec d'autres joueurs\u00a0! D\u00e9couvrez diff\u00e9rents modes de jeu, cartes, armes, types de navires et bien plus encore. 
Battez les \u00e9quipages adverses dans d'\u00e9piques combats sur terre et en mer\u00a0!", + "effectiveDate": "2099-04-06T17:35:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "viewableDate": "2023-03-30T15:00:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S2_1200x1600-bae3831e97b560958dc785e830ebed8c" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S1_2560x1440-fd7a7b3d357555880cb7969634553c5b" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_IC1_400x400-a7b91f257fcbd9ced825d3da95298170" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/aee7dd76aa6746578f476dc47f8d1d7f/EGS_BlazingSails_GetUpGames_S2_1200x1600-bae3831e97b560958dc785e830ebed8c" + } + ], + "seller": { + "id": "o-ftmts7pjfvdywkby885rdzl4hdbtys", + "name": "Iceberg Interactive" + }, + "productSlug": "blazing-sails", + "urlSlug": "blazing-sails", + "url": null, + "items": [ + { + "id": "30aec28f450a41499dd27e0d27294b56", + "namespace": "aee7dd76aa6746578f476dc47f8d1d7f" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "KR" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "blazing-sails" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "1264" + }, + { + "id": "1203" + }, + { + "id": "9547" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "blazing-sails", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + 
"discountPrice": 1479, + "originalPrice": 1479, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "14,79\u00a0\u20ac", + "discountPrice": "14,79\u00a0\u20ac", + "intermediatePrice": "14,79\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-10-12T15:00:00.000Z", + "endDate": "2023-10-19T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + } + ], + "paging": { + "count": 1000, + "total": 7 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json b/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json new file mode 100644 index 00000000000..c91d5551ff9 --- /dev/null +++ b/tests/components/epic_games_store/fixtures/error_5222_wrong_country.json @@ -0,0 +1,23 @@ +{ + "errors": [ + { + "message": "CatalogQuery/searchStore: Request failed with status code 400", + "locations": [ + { + "line": 18, + "column": 9 + } + ], + "correlationId": "e10ad58e-a4f9-4097-af5d-cafdbe0d8bbd", + "serviceResponse": "{\"errorCode\":\"errors.com.epicgames.catalog.invalid_country_code\",\"errorMessage\":\"Sorry the value you entered: en-US, does not appear to be a valid ISO country code.\",\"messageVars\":[\"en-US\"],\"numericErrorCode\":5222,\"originatingService\":\"com.epicgames.catalog.public\",\"intent\":\"prod\",\"errorStatus\":400}", + "stack": null, + "path": ["Catalog", "searchStore"] + } + ], + "data": { + "Catalog": { + "searchStore": null + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games.json b/tests/components/epic_games_store/fixtures/free_games.json new file mode 100644 index 00000000000..29ff43f32a0 --- /dev/null +++ 
b/tests/components/epic_games_store/fixtures/free_games.json @@ -0,0 +1,2189 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Rising Storm 2: Vietnam", + "id": "b19d810d322240e7b37bcf84ffac60ce", + "namespace": "3542a1df211e492bb2abecb7c734f7f9", + "description": "Red Orchestra Series' take on Vietnam: 64-player MP matches; 20+ maps; US Army & Marines, PAVN/NVA, NLF/VC; Australians and ARVN forces; 50+ weapons; 4 flyable helicopters; mines, traps and tunnels; Brutal. Authentic. Gritty. Character customization.", + "effectiveDate": "2020-10-08T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S3-2560x1440-e08edd93cb71bf15b50a74f3de2d17b0.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S3-2560x1440-e08edd93cb71bf15b50a74f3de2d17b0.jpg" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + }, + { + "type": "CodeRedemption_340x440", + "url": 
"https://cdn1.epicgames.com/3542a1df211e492bb2abecb7c734f7f9/offer/EGS_RisingStorm2Vietnam_AntimatterGamesTripwireInteractive_S4-1200x1600-5e3b2f8107e17cc008237e52761d67e5.jpg" + } + ], + "seller": { + "id": "o-2baznhy8tfh7fmyb55ul656v7ggt7r", + "name": "Tripwire Interactive" + }, + "productSlug": "rising-storm-2-vietnam/home", + "urlSlug": "risingstorm2vietnam", + "url": null, + "items": [ + { + "id": "685765c3f37049c49b45bea4173725d2", + "namespace": "3542a1df211e492bb2abecb7c734f7f9" + }, + { + "id": "c7c6d65ac4cc4ef0ae12e8e89f134684", + "namespace": "3542a1df211e492bb2abecb7c734f7f9" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "publisherName", + "value": "Tripwire Interactive" + }, + { + "key": "developerName", + "value": "Antimatter Games" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "rising-storm-2-vietnam/home" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21122" + }, + { + "id": "21125" + }, + { + "id": "21129" + }, + { + "id": "14346" + }, + { + "id": "9547" + }, + { + "id": "16011" + }, + { + "id": "15375" + }, + { + "id": "21135" + }, + { + "id": "21138" + }, + { + "id": "1299" + }, + { + "id": "16979" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "17493" + }, + { + "id": "21141" + }, + { + "id": "22485" + }, + { + "id": "18777" + }, + { + "id": "18778" + }, + { + "id": "1115" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "14944" + }, + { + "id": "19242" + }, + { + "id": "18607" + }, + { + "id": "1203" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "rising-storm-2-vietnam", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 2199, + "originalPrice": 2199, + 
"voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac21.99", + "discountPrice": "\u20ac21.99", + "intermediatePrice": "\u20ac21.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-11-03T15:00:00.000Z", + "endDate": "2022-11-10T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "Idle Champions of the Forgotten Realms", + "id": "a9748abde1c94b66aae5250bb9fc5503", + "namespace": "7e508f543b05465abe3a935960eb70ac", + "description": "Idle Champions is a licensed Dungeons & Dragons strategy management video game uniting iconic characters from novels, campaigns, and shows into one epic adventure.", + "effectiveDate": "2021-02-16T17:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S1_2560x1440-e2a1ffd224f443594d5deff3a47a45e2" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S2_1200x1600-dd9a8f25ad56089231f43cf639bde217" + }, + { + "type": 
"DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/7e508f543b05465abe3a935960eb70ac/EGS_IdleChampionsoftheForgottenRealms_CodenameEntertainment_S1_2560x1440-e2a1ffd224f443594d5deff3a47a45e2" + } + ], + "seller": { + "id": "o-3kpjwtwqwfl2p9wdwvpad7yqz4kt6c", + "name": "Codename Entertainment" + }, + "productSlug": "idle-champions-of-the-forgotten-realms", + "urlSlug": "banegeneralaudience", + "url": null, + "items": [ + { + "id": "9a4e1a1eb6b140f6a9e5e4dcb5a2bf55", + "namespace": "7e508f543b05465abe3a935960eb70ac" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "KR" + }, + { + "key": "publisherName", + "value": "Codename Entertainment" + }, + { + "key": "developerName", + "value": "Codename Entertainment" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "idle-champions-of-the-forgotten-realms" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21136" + }, + { + "id": "21122" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "1188" + }, + { + "id": "1141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "idle-champions-of-the-forgotten-realms", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Hundred Days - Winemaking Simulator", + 
"id": "141eee80fbe041d48e16e7b998829295", + "namespace": "4d8b727a49144090b103f6b6ba471e71", + "description": "Winemaking could be your best adventure. Make the best wine interacting with soil and nature and take your winery to the top. Your beautiful journey into the winemaking tradition starts now.", + "effectiveDate": "2021-05-13T14:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_G1C_00-1920x1080-0ffeb0645f0badb615627b481b4a913e.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S1-2560x1440-8f0dd95b6027cd1243361d430b3bf552.jpg" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/4d8b727a49144090b103f6b6ba471e71/offer/EGS_HundredDaysWinemakingSimulatorDEMO_BrokenArmsGames_Demo_S2-1200x1600-35531ec1fa868e3876fac76471a24017.jpg" + } + ], + "seller": { + "id": "o-ty5rvlnsbgdnfffytsywat86gcedkm", + "name": "Broken Arms Games srls" + }, + "productSlug": "hundred-days-winemaking-simulator", + 
"urlSlug": "hundred-days-winemaking-simulator", + "url": null, + "items": [ + { + "id": "03cacb8754f243bfbc536c9dda0eb32e", + "namespace": "4d8b727a49144090b103f6b6ba471e71" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "developerName", + "value": "Broken Arms Games" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "hundred-days-winemaking-simulator" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "19242" + }, + { + "id": "21130" + }, + { + "id": "16011" + }, + { + "id": "9547" + }, + { + "id": "1263" + }, + { + "id": "15375" + }, + { + "id": "18607" + }, + { + "id": "1393" + }, + { + "id": "21138" + }, + { + "id": "16979" + }, + { + "id": "21140" + }, + { + "id": "17493" + }, + { + "id": "21141" + }, + { + "id": "18777" + }, + { + "id": "1370" + }, + { + "id": "18778" + }, + { + "id": "21146" + }, + { + "id": "1115" + }, + { + "id": "21149" + }, + { + "id": "10719" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "hundred-days-winemaking-simulator", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac19.99", + "intermediatePrice": "\u20ac19.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Shadow of the Tomb Raider: Definitive Edition", + "id": "ee7f3c6725fd4fd4b8aeab8622cb770e", + "namespace": "4b5461ca8d1c488787b5200b420de066", + "description": "In Shadow of the Tomb Raider 
Definitive Edition experience the final chapter of Lara\u2019s origin as she is forged into the Tomb Raider she is destined to be.", + "effectiveDate": "2021-12-30T16:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s1-2560x1440-eca6506e95a1_2560x1440-193582a5fd76a593804e0171d6395cf4" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s4-1200x1600-7ee40d6fa744_1200x1600-950cdb624cc75d04fe3c8c0b62ce98de" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/4b5461ca8d1c488787b5200b420de066/egs-shadowofthetombraiderdefinitiveedition-eidosmontralcrystaldynamicsnixxessoftware-s1-2560x1440-eca6506e95a1_2560x1440-193582a5fd76a593804e0171d6395cf4" + } + ], + "seller": { + "id": "o-7petn7mrlk8g86ktqm7uglcr7lfaja", + "name": 
"Square Enix" + }, + "productSlug": "shadow-of-the-tomb-raider", + "urlSlug": "shadow-of-the-tomb-raider", + "url": null, + "items": [ + { + "id": "e7f90759e0544e42be9391d10a5c6000", + "namespace": "4b5461ca8d1c488787b5200b420de066" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "shadow-of-the-tomb-raider" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21122" + }, + { + "id": "18051" + }, + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21109" + }, + { + "id": "21141" + }, + { + "id": "22485" + }, + { + "id": "1370" + }, + { + "id": "21146" + }, + { + "id": "1117" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "shadow-of-the-tomb-raider", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1319, + "originalPrice": 3999, + "voucherDiscount": 0, + "discount": 2680, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac39.99", + "discountPrice": "\u20ac13.19", + "intermediatePrice": "\u20ac13.19" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "35111a3c715340d08910a9f6a5b3e846", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + 
"discountPercentage": 33 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Terraforming Mars", + "id": "f2496286331e405793d69807755b7b23", + "namespace": "25d726130e6c4fe68f88e71933bda955", + "description": "The taming of the Red Planet has begun!\n\nControl your corporation, play project cards, build up production, place your cities and green areas on the map, and race for milestones and awards!\n\nWill your corporation lead the way into humanity's new era?", + "effectiveDate": "2022-05-05T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/terraforming-mars-offer-1j70f.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/download-terraforming-mars-offer-13t2e.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/5199b206e46947ebad5e5c282e95776f/download-terraforming-mars-offer-13t2e.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "24cdfcde68bf4a7e8b8618ac2c0c460b", + "url": null, + "items": [ + { + "id": "ee49486d7346465dba1f1dec85725aee", + "namespace": "25d726130e6c4fe68f88e71933bda955" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "18051" + }, + { + "id": "1188" + }, + { + "id": "21125" + }, + { + "id": "1386" + }, + { + "id": "9547" + }, + { + "id": "21138" + }, + { + "id": "1203" + }, + { + "id": "1299" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { 
+ "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "terraforming-mars-18c3ad", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "terraforming-mars-18c3ad", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1399, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 600, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac13.99", + "intermediatePrice": "\u20ac13.99" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "8e9732952e714f6583416e66fc451cd7", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 70 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Car Mechanic Simulator 2018", + "id": "5eb27cf1747c40b5a0d4f5492774678d", + "namespace": "226306adde104c9092247dcd4bfa1499", + "description": "Build and expand your repair service empire in this incredibly detailed and highly realistic simulation game, where attention to car detail is astonishing. 
Find classic, unique cars in the new Barn Find module and Junkyard module.", + "effectiveDate": "2022-06-23T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S2_1200x1600-f285924f9144353f57ac4631f0c689e6" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S1_2560x1440-3489ef1499e64c168fdf4b14926d2c23" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/226306adde104c9092247dcd4bfa1499/EGS_CarMechanicSimulator2018_RedDotGames_S2_1200x1600-f285924f9144353f57ac4631f0c689e6" + } + ], + "seller": { + "id": "o-5n5cbrasl5yzexjc529rypg8eh8lfb", + "name": "PlayWay" + }, + "productSlug": "car-mechanic-simulator-2018", + "urlSlug": "car-mechanic-simulator-2018", + "url": null, + "items": [ + { + "id": "49a3a8597c4240ecaf1f9068106c9869", + "namespace": "226306adde104c9092247dcd4bfa1499" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "car-mechanic-simulator-2018" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21120" + }, + { + "id": "1188" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + "id": "1393" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "21146" + }, + { + "id": "21148" + }, + { + "id": "21149" + }, + { + "id": "21119" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "car-mechanic-simulator-2018", + "pageType": 
"productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1599, + "originalPrice": 1599, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac15.99", + "discountPrice": "\u20ac15.99", + "intermediatePrice": "\u20ac15.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "A Game Of Thrones: The Board Game Digital Edition", + "id": "a125d72a47a1490aba78c4e79a40395d", + "namespace": "1b737464d3c441f8956315433be02d3b", + "description": "It is the digital adaptation of the top-selling strategy board game from Fantasy Flight Games.", + "effectiveDate": "2022-06-23T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/a-game-of-thrones-offer-11gxu.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/download-a-game-of-thrones-offer-1q8ei.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/61c1413e3db0423f9ddd4a5edbee717e/download-a-game-of-thrones-offer-1q8ei.jpg" + } + ], + "seller": { + "id": "o-4x4bpaww55p5g3f6xpyqe2cneqxd5d", + "name": "Asmodee" + }, + "productSlug": null, + "urlSlug": "ce6f7ab4edab4cc2aa7e0ff4c19540e2", + "url": null, + "items": [ + { + "id": "dc6ae31efba7401fa72ed93f0bd37c6a", + "namespace": "1b737464d3c441f8956315433be02d3b" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": 
"18051" + }, + { + "id": "1188" + }, + { + "id": "21125" + }, + { + "id": "9547" + }, + { + "id": "21138" + }, + { + "id": "1203" + }, + { + "id": "1299" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "a-game-of-thrones-5858a3", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "a-game-of-thrones-5858a3", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1399, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 600, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac13.99", + "intermediatePrice": "\u20ac13.99" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "689de276cf3245a7bffdfa0d20500150", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-18T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 70 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Filament", + "id": "296453e71c884f95aecf4d582cf66915", + "namespace": "89fb09a222a54e53b692e9c36e68d0a1", + "description": "Solve challenging cable-based puzzles and uncover what really happened to the crew of The Alabaster. 
Now with Hint System (for those ultra tricky puzzles).", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/filament-offer-qrwye.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/download-filament-offer-mk58q.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/5a72e62648d747189d2f5e7abb47444c/download-filament-offer-mk58q.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "323de464947e4ee5a035c525b6b78021", + "url": null, + "items": [ + { + "id": "d4fa1325ef014725a89cc40e9b99e43d", + "namespace": "89fb09a222a54e53b692e9c36e68d0a1" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "1298" + }, + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1263" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "filament-332a92", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "filament-332a92", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1699, + "originalPrice": 1699, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac16.99", + "discountPrice": "\u20ac16.99", + "intermediatePrice": "\u20ac16.99" + } + }, + 
"lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-11-03T15:00:00.000Z", + "endDate": "2022-11-10T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + }, + { + "title": "Warhammer 40,000: Mechanicus - Standard Edition", + "id": "559b16fa81134dce83b5b8b7cf67b5b3", + "namespace": "144f9e231e2846d1a4381d9bb678f69d", + "description": "Take control of the most technologically advanced army in the Imperium - The Adeptus Mechanicus. Your every decision will weigh heavily on the outcome of the mission, in this turn-based tactical game. Will you be blessed by the Omnissiah?", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/warhammer-mechanicus-offer-17fnz.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/download-warhammer-mechanicus-offer-1f6bv.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/d26f2f9ea65c462dbd39040ae8389d36/download-warhammer-mechanicus-offer-1f6bv.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "f37159d9bd96489ab1b99bdad1ee796c", + "url": null, + "items": [ + { + "id": "f923ad9f3428472ab67baa4618c205a0", + "namespace": "144f9e231e2846d1a4381d9bb678f69d" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": 
"games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "21894" + }, + { + "id": "19847" + }, + { + "id": "1386" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "9549" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "warhammer-mechanicus-0e4b71", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "warhammer-mechanicus-0e4b71", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 2999, + "voucherDiscount": 0, + "discount": 2999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac29.99", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "7a3ee39632f5458990b6a9ad295881b8", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Fallout 3: Game of the Year Edition", + "id": "d6f01b1827c64ed388191ae507fe7c1b", + "namespace": "fa702d34a37248ba98fb17f680c085e3", + "description": "Prepare for the Future\u2122\nExperience the most acclaimed game of 2008 like never before with Fallout 3: Game of the Year Edition. 
Create a character of your choosing and descend into a post-apocalyptic world where every minute is a fight for survival", + "effectiveDate": "2022-10-20T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S2_1200x1600-e2ba392652a1f57c4feb65d6bbd1f963" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S1_2560x1440-073f5b4cf358f437a052a3c29806efa0" + }, + { + "type": "ProductLogo", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_IC1_400x400-5e37dfe1d35c9ccf25c8889fe7218613" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/fa702d34a37248ba98fb17f680c085e3/EGS_Fallout3GameoftheYearEdition_BethesdaGameStudios_S2_1200x1600-e2ba392652a1f57c4feb65d6bbd1f963" + } + ], + "seller": { + "id": "o-bthbhn6wd7fzj73v5p4436ucn3k37u", + "name": "Bethesda Softworks LLC" + }, + "productSlug": "fallout-3-game-of-the-year-edition", + "urlSlug": "fallout-3-game-of-the-year-edition", + "url": null, + "items": [ + { + "id": "6b750e631e414927bde5b3e13b647443", + "namespace": "fa702d34a37248ba98fb17f680c085e3" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "fallout-3-game-of-the-year-edition" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "21122" + }, + { + "id": "1188" + }, + { + "id": "21894" + }, + { + "id": "21127" + }, + { + "id": "9547" + }, + { + 
"id": "21137" + }, + { + "id": "21138" + }, + { + "id": "21139" + }, + { + "id": "21140" + }, + { + "id": "21141" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "1307" + }, + { + "id": "21147" + }, + { + "id": "21148" + }, + { + "id": "1117" + }, + { + "id": "21149" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "fallout-3-game-of-the-year-edition", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 659, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 1340, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac6.59", + "intermediatePrice": "\u20ac6.59" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "779554ee7a604b0091a4335a60b6e55a", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-01T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 33 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Evoland Legendary Edition", + "id": "e068e168886a4a90a4e36a310e3bda32", + "namespace": "3f7bd21610f743e598fa8e955500f5b7", + "description": "Evoland Legendary Edition brings you two great and unique RPGs, with their graphic style and gameplay changing as you progress through the game!", + "effectiveDate": "2022-10-20T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1y7m0.png" + }, + { + "type": "OfferImageTall", + "url": 
"https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1j93v.png" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/aafde465b31e4bd5a169ff1c8a164a17/evoland-legendary-edition-1j93v.png" + } + ], + "seller": { + "id": "o-ealhln64lfep9ww929uq9qcdmbyfn4", + "name": "Shiro Games SAS" + }, + "productSlug": null, + "urlSlug": "224c60bb93864e1c8a1900bcf7d661dd", + "url": null, + "items": [ + { + "id": "c829f27d0ab0406db8edf2b97562ee93", + "namespace": "3f7bd21610f743e598fa8e955500f5b7" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition" + }, + { + "path": "games" + }, + { + "path": "games/edition/base" + } + ], + "tags": [ + { + "id": "1216" + }, + { + "id": "21109" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + }, + { + "id": "9549" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "evoland-legendary-edition-5753ec", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "evoland-legendary-edition-5753ec", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "\u20ac19.99", + "intermediatePrice": "\u20ac19.99" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Saturnalia", + "id": "275d5915ebd2479f983f51025b22a1b8", + "namespace": "c749cd78da34408d8434a46271f4bb79", + "description": "A Survival Horror Adventure: as an ensemble cast, explore an isolated village of ancient ritual \u2013 its labyrinthine roads change each time you lose all 
your characters.", + "effectiveDate": "2022-10-27T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S3_2560x1440-3cd916a7260b77c8488f8f2b0f3a51ab" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S3_2560x1440-3cd916a7260b77c8488f8f2b0f3a51ab" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/c749cd78da34408d8434a46271f4bb79/EGS_Saturnalia_SantaRagione_S4_1200x1600-2216ff4aa6997dfb13d8bd4c6f2fa99e" + } + ], + "seller": { + "id": "o-cjwnkas5rn476tzk72fbh2ftutnc2y", + "name": "Santa Ragione" + }, + "productSlug": "saturnalia", + "urlSlug": "saturnalia", + "url": null, + "items": [ + { + "id": "dbce8ecb6923490c9404529651251216", + "namespace": "c749cd78da34408d8434a46271f4bb79" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "saturnalia" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1218" + }, + { + "id": "19847" + }, + { + "id": "1080" + }, + { + 
"id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "saturnalia", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 1999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "\u20ac19.99", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "8fa8f62eac9e4cab9fe242987c0f0988", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2022-10-27T15:00:00.000Z", + "endDate": "2022-11-03T15:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Maneater", + "id": "a22a7af179c54b86a93f3193ace8f7f4", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Maneater", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-carousel-mobile-thumbnail-1200x1600_1200x1600-1f45bf1ceb21c1ca2947f6df5ece5346" + }, + { + "type": "VaultOpened", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w4-1920x1080_1920x1080-2df36fe63c18ff6fcb5febf3dd7ed06e" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "maneater", + "urlSlug": "game-4", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "free-games" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "maneater" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + }, + { + "title": "Wolfenstein: The New Order", + "id": "1d41b93230e54bdd80c559d72adb7f4f", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Wolfenstein: The New Order", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": 
null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-tease-generic-promo-1920x1080_1920x1080-f7742c265e217510835ed14e04c48b4b" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-carousel-mobile-thumbnail-1200x1600_1200x1600-1f45bf1ceb21c1ca2947f6df5ece5346" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + }, + { + "type": "VaultOpened", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/egs-vault-w3-1920x1080_1920x1080-4a501d33fb4ac641e3e1e290dcc0e6c1" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "wolfenstein-the-new-order", + "urlSlug": "game-3", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "free-games" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "wolfenstein-the-new-order" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + 
"voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + } + ], + "paging": { + "count": 1000, + "total": 14 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games_christmas_special.json b/tests/components/epic_games_store/fixtures/free_games_christmas_special.json new file mode 100644 index 00000000000..0c65f47d3a0 --- /dev/null +++ b/tests/components/epic_games_store/fixtures/free_games_christmas_special.json @@ -0,0 +1,253 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Cursed to Golf", + "id": "0e4551e4ae65492b88009f8a4e41d778", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Cursed to Golf", + "effectiveDate": "2023-12-27T16:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": "2023-12-28T16:00:00.000Z", + "viewableDate": "2023-12-26T15:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-9_1920x1080-418a8fa10dd305bb2a219a7ec869c5ef" + }, + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-9-teaser_1920x1080-e71ae0041736db5ac259a355cb301116" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "cursed-to-golf-a6bc22", + "urlSlug": "mysterygame-9", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + 
"key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "cursed-to-golf-a6bc22" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-12-27T16:00:00.000Z", + "endDate": "2023-12-28T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + }, + { + "startDate": "2023-12-27T16:00:00.000Z", + "endDate": "2023-12-28T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [] + } + }, + { + "title": "Mystery Game Day 10", + "id": "a8c3537a579943a688e3bd355ae36209", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Mystery Game Day 10", + "effectiveDate": "2099-01-01T16:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "viewableDate": "2023-12-27T15:25:00.000Z", + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-10-teaser_1920x1080-3ea48042a44263bf1a0a59c725b6d95b" + }, + { + "type": "DieselStoreFrontWide", + "url": 
"https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/Free-Game-10-teaser_1920x1080-3ea48042a44263bf1a0a59c725b6d95b" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "[]", + "urlSlug": "mysterygame-10", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "[]" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-12-28T16:00:00.000Z", + "endDate": "2023-12-29T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ] + } + } + ], + "paging": { + "count": 1000, + "total": 2 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/fixtures/free_games_one.json b/tests/components/epic_games_store/fixtures/free_games_one.json new file mode 100644 index 00000000000..48cd64f68d4 --- 
/dev/null +++ b/tests/components/epic_games_store/fixtures/free_games_one.json @@ -0,0 +1,658 @@ +{ + "data": { + "Catalog": { + "searchStore": { + "elements": [ + { + "title": "Borderlands 3 Season Pass", + "id": "c3913a91e07b43cfbbbcfd8244c86dcc", + "namespace": "catnip", + "description": "Prolongez votre aventure dans Borderlands\u00a03 avec le Season Pass, regroupant des \u00e9l\u00e9ments cosm\u00e9tiques exclusifs et quatre histoires additionnelles, pour encore plus de missions et de d\u00e9fis\u00a0!", + "effectiveDate": "2019-09-11T12:00:00.000Z", + "offerType": "DLC", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/offer/catnip/Diesel_productv2_borderlands-3_season-pass_BL3_SEASONPASS_Hero-3840x2160-4411e63a005a43811a2bc516ae7ec584598fd4aa-3840x2160-b8988ebb0f3d9159671e8968af991f30_3840x2160-b8988ebb0f3d9159671e8968af991f30" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/offer/catnip/2KGMKT_BL3_Season_Pass_EGS_1200x1600_1200x1600-a7438a079c5576d328a74b9121278075" + } + ], + "seller": { + "id": "o-37m6jbj5wcvrcvm4wusv7nazdfvbjk", + "name": "2K Games, Inc." 
+ }, + "productSlug": "borderlands-3/season-pass", + "urlSlug": "borderlands-3--season-pass", + "url": null, + "items": [ + { + "id": "e9fdc1a9f47b4a5e8e63841c15de2b12", + "namespace": "catnip" + }, + { + "id": "fbc46bb6056940d2847ee1e80037a9af", + "namespace": "catnip" + }, + { + "id": "ff8e1152ddf742b68f9ac0cecd378917", + "namespace": "catnip" + }, + { + "id": "939e660825764e208938ab4f26b4da56", + "namespace": "catnip" + }, + { + "id": "4c43a9a691114ccd91c1884ab18f4e27", + "namespace": "catnip" + }, + { + "id": "3a6a3f9b351b4b599808df3267669b83", + "namespace": "catnip" + }, + { + "id": "ab030a9f53f3428fb2baf2ddbb0bb5ac", + "namespace": "catnip" + }, + { + "id": "ff96eef22b0e4c498e8ed80ac0030325", + "namespace": "catnip" + }, + { + "id": "5021e93a73374d6db1c1ce6c92234f8f", + "namespace": "catnip" + }, + { + "id": "9c0b1eb3265340678dff0fcb106402b1", + "namespace": "catnip" + }, + { + "id": "8c826db6e14f44aeac8816e1bd593632", + "namespace": "catnip" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.blacklist", + "value": "SA" + }, + { + "key": "publisherName", + "value": "2K" + }, + { + "key": "developerName", + "value": "Gearbox Software" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "borderlands-3/season-pass" + } + ], + "categories": [ + { + "path": "addons" + }, + { + "path": "freegames" + }, + { + "path": "addons/durable" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1264" + }, + { + "id": "16004" + }, + { + "id": "14869" + }, + { + "id": "26789" + }, + { + "id": "1367" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "9549" + }, + { + "id": "1294" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "borderlands-3", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "borderlands-3--season-pass", + "pageType": "addon--cms-hybrid" + } + ], + "price": { + "totalPrice": { + "discountPrice": 4999, + "originalPrice": 4999, + "voucherDiscount": 0, + "discount": 
0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "49,99\u00a0\u20ac", + "discountPrice": "49,99\u00a0\u20ac", + "intermediatePrice": "49,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 30 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 30 + } + } + ] + } + ] + } + }, + { + "title": "Call of the Sea", + "id": "92da5d8d918543b6b408e36d9af81765", + "namespace": "5e427319eea1401ab20c6cd78a4163c4", + "description": "Call of the Sea is an otherworldly tale of mystery and love set in the 1930s South Pacific. 
Explore a lush island paradise, solve puzzles and unlock secrets in the hunt for your husband\u2019s missing expedition.", + "effectiveDate": "2022-02-17T15:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S1_2560x1440-204699c6410deef9c18be0ee392f8335" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S5_1920x1080-7b22dfebdd9fcdde6e526c5dc4c16eb1" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "CodeRedemption_340x440", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/salesEvent/salesEvent/EGS_CalloftheSea_OutoftheBlue_S2_1200x1600-db63acf0c479c185e0ef8f8e73c8f0d8" + } + ], + "seller": { + "id": "o-fay4ghw9hhamujs53rfhy83ffexb7k", + "name": "Raw Fury" + }, + "productSlug": "call-of-the-sea", + "urlSlug": "call-of-the-sea", + "url": null, + "items": [ + { + "id": "cbc9c76c4bfc4bc6b28abb3afbcbf07a", + "namespace": "5e427319eea1401ab20c6cd78a4163c4" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.productSlug", + "value": "call-of-the-sea" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "games/edition" + }, + { + "path": "games/edition/base" + }, + { + "path": "applications" + } + ], + "tags": [ + { + "id": "1296" + }, + { + "id": "1298" + }, + { + 
"id": "21894" + }, + { + "id": "1370" + }, + { + "id": "9547" + }, + { + "id": "1117" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "call-of-the-sea", + "pageType": "productHome" + } + ] + }, + "offerMappings": [], + "price": { + "totalPrice": { + "discountPrice": 1999, + "originalPrice": 1999, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "19,99\u00a0\u20ac", + "discountPrice": "19,99\u00a0\u20ac", + "intermediatePrice": "19,99\u00a0\u20ac" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": { + "promotionalOffers": [], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 60 + } + } + ] + } + ] + } + }, + { + "title": "Rise of Industry", + "id": "c04a2ab8ff4442cba0a41fb83453e701", + "namespace": "9f101e25b1a9427a9e6971d2b21c5f82", + "description": "Mettez vos comp\u00e9tences entrepreneuriales \u00e0 l'\u00e9preuve en cr\u00e9ant et en optimisant des cha\u00eenes de production complexes tout en gardant un \u0153il sur les r\u00e9sultats financiers. 
\u00c0 l'aube du 20e si\u00e8cle, appr\u00eatez-vous \u00e0 entrer dans un \u00e2ge d'or industriel, ou une d\u00e9pression historique.", + "effectiveDate": "2022-08-11T11:00:00.000Z", + "offerType": "BASE_GAME", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": false, + "keyImages": [ + { + "type": "OfferImageWide", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/rise-of-industry-offer-1p22f.jpg" + }, + { + "type": "OfferImageTall", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/download-rise-of-industry-offer-1uujr.jpg" + }, + { + "type": "Thumbnail", + "url": "https://cdn1.epicgames.com/spt-assets/a6aeec29591b4b56b4383b4d2d7d0e1e/download-rise-of-industry-offer-1uujr.jpg" + } + ], + "seller": { + "id": "o-fnqgc5v2xczx9fgawvcejwj88z2mnx", + "name": "Kasedo Games Ltd" + }, + "productSlug": null, + "urlSlug": "f88fedc022fe488caaedaa5c782ff90d", + "url": null, + "items": [ + { + "id": "9f5b48a778824e6aa330d2c1a47f41b2", + "namespace": "9f101e25b1a9427a9e6971d2b21c5f82" + } + ], + "customAttributes": [ + { + "key": "autoGeneratedPrice", + "value": "false" + }, + { + "key": "isManuallySetPCReleaseDate", + "value": "true" + } + ], + "categories": [ + { + "path": "freegames" + }, + { + "path": "games/edition/base" + }, + { + "path": "games/edition" + }, + { + "path": "games" + } + ], + "tags": [ + { + "id": "26789" + }, + { + "id": "19847" + }, + { + "id": "1370" + }, + { + "id": "1115" + }, + { + "id": "9547" + }, + { + "id": "10719" + } + ], + "catalogNs": { + "mappings": [ + { + "pageSlug": "rise-of-industry-0af838", + "pageType": "productHome" + } + ] + }, + "offerMappings": [ + { + "pageSlug": "rise-of-industry-0af838", + "pageType": "productHome" + } + ], + "price": { + "totalPrice": { + "discountPrice": 0, + "originalPrice": 2999, + "voucherDiscount": 0, + "discount": 2999, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": 
"29,99\u00a0\u20ac", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [ + { + "id": "a19d30dc34f44923993e68b82b75a084", + "endDate": "2023-03-09T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE" + } + } + ] + } + ] + }, + "promotions": { + "promotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-02T16:00:00.000Z", + "endDate": "2023-03-09T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 0 + } + } + ] + } + ], + "upcomingPromotionalOffers": [ + { + "promotionalOffers": [ + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + }, + { + "startDate": "2023-03-09T16:00:00.000Z", + "endDate": "2023-03-16T16:00:00.000Z", + "discountSetting": { + "discountType": "PERCENTAGE", + "discountPercentage": 25 + } + } + ] + } + ] + } + }, + { + "title": "Dishonored - Definitive Edition", + "id": "4d25d74b88d1474a8ab21ffb88ca6d37", + "namespace": "d5241c76f178492ea1540fce45616757", + "description": "Experience the definitive Dishonored collection. 
This complete compilation includes Dishonored as well as all of its additional content - Dunwall City Trials, The Knife of Dunwall, The Brigmore Witches and Void Walker\u2019s Arsenal.", + "effectiveDate": "2099-01-01T00:00:00.000Z", + "offerType": "OTHERS", + "expiryDate": null, + "status": "ACTIVE", + "isCodeRedemptionOnly": true, + "keyImages": [ + { + "type": "VaultClosed", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/15days-day15-wrapped-desktop-carousel-image_1920x1080-ebecfa7c79f02a9de5bca79560bee953" + }, + { + "type": "DieselStoreFrontWide", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/15days-day15-Unwrapped-desktop-carousel-image1_1920x1080-1992edb42bb8554ddeb14d430ba3f858" + }, + { + "type": "DieselStoreFrontTall", + "url": "https://cdn1.epicgames.com/offer/d5241c76f178492ea1540fce45616757/DAY15-carousel-mobile-unwrapped-image1_1200x1600-9716d77667d2a82931c55a4e4130989e" + } + ], + "seller": { + "id": "o-ufmrk5furrrxgsp5tdngefzt5rxdcn", + "name": "Epic Dev Test Account" + }, + "productSlug": "dishonored-definitive-edition", + "urlSlug": "mystery-game15", + "url": null, + "items": [ + { + "id": "8341d7c7e4534db7848cc428aa4cbe5a", + "namespace": "d5241c76f178492ea1540fce45616757" + } + ], + "customAttributes": [ + { + "key": "com.epicgames.app.freegames.vault.close", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.slug", + "value": "sales-and-specials/holiday-sale" + }, + { + "key": "com.epicgames.app.blacklist", + "value": "[]" + }, + { + "key": "com.epicgames.app.freegames.vault.open", + "value": "[]" + }, + { + "key": "com.epicgames.app.productSlug", + "value": "dishonored-definitive-edition" + } + ], + "categories": [ + { + "path": "freegames/vaulted" + }, + { + "path": "freegames" + }, + { + "path": "games" + }, + { + "path": "applications" + } + ], + "tags": [], + "catalogNs": { + "mappings": [] + }, + "offerMappings": [], + "price": { + "totalPrice": { + 
"discountPrice": 0, + "originalPrice": 0, + "voucherDiscount": 0, + "discount": 0, + "currencyCode": "EUR", + "currencyInfo": { + "decimals": 2 + }, + "fmtPrice": { + "originalPrice": "0", + "discountPrice": "0", + "intermediatePrice": "0" + } + }, + "lineOffers": [ + { + "appliedRules": [] + } + ] + }, + "promotions": null + } + ], + "paging": { + "count": 1000, + "total": 4 + } + } + } + }, + "extensions": {} +} diff --git a/tests/components/epic_games_store/test_calendar.py b/tests/components/epic_games_store/test_calendar.py new file mode 100644 index 00000000000..46ca974f85c --- /dev/null +++ b/tests/components/epic_games_store/test_calendar.py @@ -0,0 +1,162 @@ +"""Tests for the Epic Games Store calendars.""" + +from unittest.mock import Mock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.calendar import ( + DOMAIN as CALENDAR_DOMAIN, + EVENT_END_DATETIME, + EVENT_START_DATETIME, + SERVICE_GET_EVENTS, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from .common import setup_platform + +from tests.common import async_fire_time_changed + + +async def test_setup_component(hass: HomeAssistant, service_multiple: Mock) -> None: + """Test setup component.""" + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + + +async def test_discount_games( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test discount games calendar.""" + freezer.move_to("2022-10-15T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.state == STATE_OFF + + 
freezer.move_to("2022-10-30T00:00:00.000Z") + async_fire_time_changed(hass) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.state == STATE_ON + + cal_attrs = dict(state.attributes) + assert cal_attrs == { + "friendly_name": "Epic Games Store Discount games", + "message": "Shadow of the Tomb Raider: Definitive Edition", + "all_day": False, + "start_time": "2022-10-18 08:00:00", + "end_time": "2022-11-01 08:00:00", + "location": "", + "description": "In Shadow of the Tomb Raider Definitive Edition experience the final chapter of Lara\u2019s origin as she is forged into the Tomb Raider she is destined to be.\n\nhttps://store.epicgames.com/fr/p/shadow-of-the-tomb-raider", + } + + +async def test_free_games( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test free games calendar.""" + freezer.move_to("2022-10-30T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.state == STATE_ON + + cal_attrs = dict(state.attributes) + assert cal_attrs == { + "friendly_name": "Epic Games Store Free games", + "message": "Warhammer 40,000: Mechanicus - Standard Edition", + "all_day": False, + "start_time": "2022-10-27 08:00:00", + "end_time": "2022-11-03 08:00:00", + "location": "", + "description": "Take control of the most technologically advanced army in the Imperium - The Adeptus Mechanicus. Your every decision will weigh heavily on the outcome of the mission, in this turn-based tactical game. 
Will you be blessed by the Omnissiah?\n\nhttps://store.epicgames.com/fr/p/warhammer-mechanicus-0e4b71", + } + + +async def test_attribute_not_found( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_attribute_not_found: Mock, +) -> None: + """Test setup calendars with attribute not found error.""" + freezer.move_to("2023-10-12T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + assert state.state == STATE_ON + + +async def test_christmas_special( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_christmas_special: Mock, +) -> None: + """Test setup calendars with Christmas special case.""" + freezer.move_to("2023-12-28T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + state = hass.states.get("calendar.epic_games_store_discount_games") + assert state.name == "Epic Games Store Discount games" + assert state.state == STATE_OFF + + state = hass.states.get("calendar.epic_games_store_free_games") + assert state.name == "Epic Games Store Free games" + assert state.state == STATE_ON + + +async def test_get_events( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_multiple: Mock, +) -> None: + """Test setup component with calendars.""" + freezer.move_to("2022-10-30T00:00:00.000Z") + + await setup_platform(hass, CALENDAR_DOMAIN) + + # 1 week in range of data + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: ["calendar.epic_games_store_discount_games"], + EVENT_START_DATETIME: dt_util.parse_datetime("2022-10-20T00:00:00.000Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("2022-10-27T00:00:00.000Z"), + }, + blocking=True, + return_response=True, + ) + + assert 
len(result["calendar.epic_games_store_discount_games"]["events"]) == 3 + + # 1 week out of range of data + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: ["calendar.epic_games_store_discount_games"], + EVENT_START_DATETIME: dt_util.parse_datetime("1970-01-01T00:00:00.000Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("1970-01-08T00:00:00.000Z"), + }, + blocking=True, + return_response=True, + ) + + assert len(result["calendar.epic_games_store_discount_games"]["events"]) == 0 diff --git a/tests/components/epic_games_store/test_config_flow.py b/tests/components/epic_games_store/test_config_flow.py new file mode 100644 index 00000000000..83e9cf9e99e --- /dev/null +++ b/tests/components/epic_games_store/test_config_flow.py @@ -0,0 +1,142 @@ +"""Test the Epic Games Store config flow.""" + +from http.client import HTTPException +from unittest.mock import patch + +from homeassistant import config_entries +from homeassistant.components.epic_games_store.config_flow import get_default_language +from homeassistant.components.epic_games_store.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND, + DATA_ERROR_WRONG_COUNTRY, + DATA_FREE_GAMES, + MOCK_COUNTRY, + MOCK_LANGUAGE, +) + + +async def test_default_language(hass: HomeAssistant) -> None: + """Test we get the form.""" + hass.config.language = "fr" + hass.config.country = "FR" + assert get_default_language(hass) == "fr" + + hass.config.language = "es" + hass.config.country = "ES" + assert get_default_language(hass) == "es-ES" + + hass.config.language = "en" + hass.config.country = "AZ" + assert get_default_language(hass) is None + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, 
context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] is None + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_FREE_GAMES, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}" + assert ( + result2["title"] + == f"Epic Games Store - Free Games ({MOCK_LANGUAGE}-{MOCK_COUNTRY})" + ) + assert result2["data"] == { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + } + + +async def test_form_cannot_connect(hass: HomeAssistant) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + side_effect=HTTPException, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + +async def test_form_cannot_connect_wrong_param(hass: HomeAssistant) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_ERROR_WRONG_COUNTRY, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + 
assert result2["type"] == FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + +async def test_form_service_error(hass: HomeAssistant) -> None: + """Test we handle service error gracefully.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.epic_games_store.config_flow.EpicGamesStoreAPI.get_free_games", + return_value=DATA_ERROR_ATTRIBUTE_NOT_FOUND, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] == FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == f"freegames-{MOCK_LANGUAGE}-{MOCK_COUNTRY}" + assert ( + result2["title"] + == f"Epic Games Store - Free Games ({MOCK_LANGUAGE}-{MOCK_COUNTRY})" + ) + assert result2["data"] == { + CONF_LANGUAGE: MOCK_LANGUAGE, + CONF_COUNTRY: MOCK_COUNTRY, + } diff --git a/tests/components/epic_games_store/test_helper.py b/tests/components/epic_games_store/test_helper.py new file mode 100644 index 00000000000..155ccb7d211 --- /dev/null +++ b/tests/components/epic_games_store/test_helper.py @@ -0,0 +1,74 @@ +"""Tests for the Epic Games Store helpers.""" + +from typing import Any + +import pytest + +from homeassistant.components.epic_games_store.helper import ( + format_game_data, + get_game_url, + is_free_game, +) + +from .const import DATA_ERROR_ATTRIBUTE_NOT_FOUND, DATA_FREE_GAMES_ONE + +FREE_GAMES_API = DATA_FREE_GAMES_ONE["data"]["Catalog"]["searchStore"]["elements"] +FREE_GAME = FREE_GAMES_API[2] +NOT_FREE_GAME = FREE_GAMES_API[0] + + +def test_format_game_data() -> None: + """Test game data format.""" + game_data = format_game_data(FREE_GAME, "fr") + assert game_data + assert game_data["title"] + assert game_data["description"] + assert game_data["released_at"] + assert game_data["original_price"] + assert 
game_data["publisher"] + assert game_data["url"] + assert game_data["img_portrait"] + assert game_data["img_landscape"] + assert game_data["discount_type"] == "free" + assert game_data["discount_start_at"] + assert game_data["discount_end_at"] + + +@pytest.mark.parametrize( + ("raw_game_data", "expected_result"), + [ + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][1], + "/p/destiny-2--bungie-30th-anniversary-pack", + ), + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][4], + "/bundles/qube-ultimate-bundle", + ), + ( + DATA_ERROR_ATTRIBUTE_NOT_FOUND["data"]["Catalog"]["searchStore"][ + "elements" + ][5], + "/p/mystery-game-7", + ), + ], +) +def test_get_game_url(raw_game_data: dict[str, Any], expected_result: bool) -> None: + """Test to get the game URL.""" + assert get_game_url(raw_game_data, "fr").endswith(expected_result) + + +@pytest.mark.parametrize( + ("raw_game_data", "expected_result"), + [ + (FREE_GAME, True), + (NOT_FREE_GAME, False), + ], +) +def test_is_free_game(raw_game_data: dict[str, Any], expected_result: bool) -> None: + """Test if this game is free.""" + assert is_free_game(raw_game_data) == expected_result diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index e23f020991d..f71b4196be6 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -181,7 +181,9 @@ async def mock_dashboard(hass): class MockESPHomeDevice: """Mock an esphome device.""" - def __init__(self, entry: MockConfigEntry, client: APIClient) -> None: + def __init__( + self, entry: MockConfigEntry, client: APIClient, device_info: DeviceInfo + ) -> None: """Init the mock.""" self.entry = entry self.client = client @@ -193,6 +195,7 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.device_info = device_info def set_state_callback(self, state_callback: 
Callable[[EntityState], None]) -> None: """Set the state callback.""" @@ -274,8 +277,6 @@ async def _mock_generic_device_entry( ) entry.add_to_hass(hass) - mock_device = MockESPHomeDevice(entry, mock_client) - default_device_info = { "name": "test", "friendly_name": "Test", @@ -284,6 +285,8 @@ async def _mock_generic_device_entry( } device_info = DeviceInfo(**(default_device_info | mock_device_info)) + mock_device = MockESPHomeDevice(entry, mock_client, device_info) + def _subscribe_states(callback: Callable[[EntityState], None]) -> None: """Subscribe to state.""" mock_device.set_state_callback(callback) @@ -302,7 +305,7 @@ async def _mock_generic_device_entry( """Subscribe to home assistant states.""" mock_device.set_home_assistant_state_subscription_callback(on_state_sub) - mock_client.device_info = AsyncMock(return_value=device_info) + mock_client.device_info = AsyncMock(return_value=mock_device.device_info) mock_client.subscribe_voice_assistant = Mock() mock_client.list_entities_services = AsyncMock( return_value=mock_list_entities_services diff --git a/tests/components/esphome/test_datetime.py b/tests/components/esphome/test_datetime.py new file mode 100644 index 00000000000..3bdc196de95 --- /dev/null +++ b/tests/components/esphome/test_datetime.py @@ -0,0 +1,79 @@ +"""Test ESPHome datetimes.""" + +from unittest.mock import call + +from aioesphomeapi import APIClient, DateTimeInfo, DateTimeState + +from homeassistant.components.datetime import ( + ATTR_DATETIME, + DOMAIN as DATETIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + + +async def test_generic_datetime_entity( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a generic datetime entity.""" + entity_info = [ + DateTimeInfo( + object_id="mydatetime", + key=1, + name="my datetime", + unique_id="my_datetime", + ) + ] + states = [DateTimeState(key=1, 
epoch_seconds=1713270896)] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("datetime.test_mydatetime") + assert state is not None + assert state.state == "2024-04-16T12:34:56+00:00" + + await hass.services.async_call( + DATETIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: "datetime.test_mydatetime", + ATTR_DATETIME: "2000-01-01T01:23:45+00:00", + }, + blocking=True, + ) + mock_client.datetime_command.assert_has_calls([call(1, 946689825)]) + mock_client.datetime_command.reset_mock() + + +async def test_generic_datetime_missing_state( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a generic datetime entity with missing state.""" + entity_info = [ + DateTimeInfo( + object_id="mydatetime", + key=1, + name="my datetime", + unique_id="my_datetime", + ) + ] + states = [DateTimeState(key=1, missing_state=True)] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("datetime.test_mydatetime") + assert state is not None + assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py new file mode 100644 index 00000000000..c17dc4d98a9 --- /dev/null +++ b/tests/components/esphome/test_event.py @@ -0,0 +1,38 @@ +"""Test ESPHome Events.""" + +from aioesphomeapi import APIClient, Event, EventInfo +import pytest + +from homeassistant.components.event import EventDeviceClass +from homeassistant.core import HomeAssistant + + +@pytest.mark.freeze_time("2024-04-24 00:00:00+00:00") +async def test_generic_event_entity( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a generic event entity.""" + entity_info = [ + EventInfo( + object_id="myevent", + 
key=1, + name="my event", + unique_id="my_event", + event_types=["type1", "type2"], + device_class=EventDeviceClass.BUTTON, + ) + ] + states = [Event(key=1, event_type="type1")] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("event.test_myevent") + assert state is not None + assert state.state == "2024-04-24T00:00:00.000+00:00" + assert state.attributes["event_type"] == "type1" diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 959ad12876d..b3deb2f33ee 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -1,7 +1,6 @@ """Test ESPHome update entities.""" from collections.abc import Awaitable, Callable -import dataclasses from unittest.mock import Mock, patch from aioesphomeapi import APIClient, EntityInfo, EntityState, UserService @@ -18,7 +17,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import async_dispatcher_send from .conftest import MockESPHomeDevice @@ -176,9 +174,11 @@ async def test_update_entity( async def test_update_static_info( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], mock_dashboard, ) -> None: """Test ESPHome update entity.""" @@ -190,32 +190,25 @@ async def test_update_static_info( ] await async_get_dashboard(hass).async_refresh() - signal_static_info_updated = f"esphome_{mock_config_entry.entry_id}_on_list" - runtime_data = Mock( - available=True, - device_info=mock_device_info, - signal_static_info_updated=signal_static_info_updated, + mock_device: MockESPHomeDevice = await 
mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], ) - with patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=runtime_data, - ): - assert await hass.config_entries.async_forward_entry_setup( - mock_config_entry, "update" - ) - - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is not None - assert state.state == "on" + assert state.state == STATE_ON - runtime_data.device_info = dataclasses.replace( - runtime_data.device_info, esphome_version="1.2.3" - ) - async_dispatcher_send(hass, signal_static_info_updated, []) + object.__setattr__(mock_device.device_info, "esphome_version", "1.2.3") + await mock_device.mock_disconnect(True) + await mock_device.mock_connect() - state = hass.states.get("update.none_firmware") - assert state.state == "off" + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("update.test_firmware") + assert state.state == STATE_OFF @pytest.mark.parametrize( diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index 49092da75c7..3941917e130 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -10,20 +10,20 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def all_fixture(): """Fixture data.""" data = json.loads(load_fixture("data.json", "evil_genius_labs")) return {item["name"]: item for item in data} -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def info_fixture(): """Fixture info.""" return json.loads(load_fixture("info.json", "evil_genius_labs")) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def product_fixture(): """Fixture info.""" return {"productName": "Fibonacci256"} 
diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index dcf5f12a24a..b6b4e3992cd 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -89,36 +89,6 @@ async def test_config_flow_user_initiated_success(hass: HomeAssistant) -> None: } -async def test_config_flow_user_initiated_connect_failure( - hass: HomeAssistant, mock_fibaro_client: Mock -) -> None: - """Connect failure in flow manually initialized by the user.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - mock_fibaro_client.connect.return_value = False - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_URL: TEST_URL, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} - - await _recovery_after_failure_works(hass, mock_fibaro_client, result) - - async def test_config_flow_user_initiated_auth_failure( hass: HomeAssistant, mock_fibaro_client: Mock ) -> None: diff --git a/tests/components/folder_watcher/conftest.py b/tests/components/folder_watcher/conftest.py new file mode 100644 index 00000000000..06c0a41d49c --- /dev/null +++ b/tests/components/folder_watcher/conftest.py @@ -0,0 +1,17 @@ +"""Fixtures for Folder Watcher integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + + +@pytest.fixture +def mock_setup_entry() -> Generator[None, None, None]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.folder_watcher.async_setup_entry", return_value=True + ): + yield diff --git 
a/tests/components/folder_watcher/test_config_flow.py b/tests/components/folder_watcher/test_config_flow.py new file mode 100644 index 00000000000..745059717fb --- /dev/null +++ b/tests/components/folder_watcher/test_config_flow.py @@ -0,0 +1,186 @@ +"""Test the Folder Watcher config flow.""" + +from pathlib import Path +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.folder_watcher.const import ( + CONF_FOLDER, + CONF_PATTERNS, + DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +async def test_form(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we get the form.""" + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_allowed_path(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not allowed path.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "not_allowed_dir"} + + hass.config.allowlist_external_dirs = {tmp_path} + + result = await hass.config_entries.flow.async_configure( 
+ result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_directory(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not a directory.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: "not_a_directory"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "not_dir"} + + hass.config.allowlist_external_dirs = {path} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_not_readable_dir(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we handle not able to read directory.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch("os.access", return_value=False): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "not_readable_dir"} + + hass.config.allowlist_external_dirs = {path} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == 
f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_form_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we abort when entry is already configured.""" + path = tmp_path.as_posix() + hass.config.allowlist_external_dirs = {path} + + entry = MockConfigEntry( + domain=DOMAIN, + title=f"Folder Watcher {path}", + data={CONF_FOLDER: path}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import(hass: HomeAssistant, tmp_path: Path) -> None: + """Test import flow.""" + path = tmp_path.as_posix() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path, CONF_PATTERNS: ["*"]}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["title"] == f"Folder Watcher {path}" + assert result["options"] == {CONF_FOLDER: path, CONF_PATTERNS: ["*"]} + + +async def test_import_already_configured(hass: HomeAssistant, tmp_path: Path) -> None: + """Test we abort import when entry is already configured.""" + path = tmp_path.as_posix() + + entry = MockConfigEntry( + domain=DOMAIN, + title=f"Folder Watcher {path}", + data={CONF_FOLDER: path}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_FOLDER: path}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/freebox/conftest.py b/tests/components/freebox/conftest.py index cf520043755..2fe4e1b77de 100644 
--- a/tests/components/freebox/conftest.py +++ b/tests/components/freebox/conftest.py @@ -108,8 +108,7 @@ def mock_router_bridge_mode(mock_device_registry_devices, router): router().lan.get_hosts_list = AsyncMock( side_effect=HttpRequestError( - "Request failed (APIResponse: %s)" - % json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE) + f"Request failed (APIResponse: {json.dumps(DATA_LAN_GET_HOSTS_LIST_MODE_BRIDGE)})" ) ) diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index e32ca55f65d..acf6b0e98cd 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -74,16 +74,6 @@ class FritzConnectionMock: return self._services[service][action] -class FritzHostMock(FritzHosts): - """FritzHosts mocking.""" - - get_mesh_topology = MagicMock() - get_mesh_topology.return_value = MOCK_MESH_DATA - - get_hosts_attributes = MagicMock() - get_hosts_attributes.return_value = MOCK_HOST_ATTRIBUTES_DATA - - @pytest.fixture(name="fc_data") def fc_data_mock(): """Fixture for default fc_data.""" @@ -105,6 +95,8 @@ def fh_class_mock(): """Fixture that sets up a mocked FritzHosts class.""" with patch( "homeassistant.components.fritz.common.FritzHosts", - new=FritzHostMock, + new=FritzHosts, ) as result: + result.get_mesh_topology = MagicMock(return_value=MOCK_MESH_DATA) + result.get_hosts_attributes = MagicMock(return_value=MOCK_HOST_ATTRIBUTES_DATA) yield result diff --git a/tests/components/fritz/const.py b/tests/components/fritz/const.py index ce530e32964..0d1222dfcda 100644 --- a/tests/components/fritz/const.py +++ b/tests/components/fritz/const.py @@ -8,6 +8,7 @@ from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, + CONF_SSL, CONF_USERNAME, ) @@ -22,10 +23,12 @@ MOCK_CONFIG = { CONF_PORT: "1234", CONF_PASSWORD: "fake_pass", CONF_USERNAME: "fake_user", + CONF_SSL: False, } ] } } + MOCK_HOST = "fake_host" MOCK_IPS = { "fritz.box": "192.168.178.1", @@ -902,6 +905,14 @@ MOCK_HOST_ATTRIBUTES_DATA = [ ] 
MOCK_USER_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] +MOCK_USER_INPUT_ADVANCED = MOCK_USER_DATA +MOCK_USER_INPUT_SIMPLE = { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_SSL: False, +} + MOCK_DEVICE_INFO = { ATTR_HOST: MOCK_HOST, ATTR_NEW_SERIAL_NUMBER: MOCK_SERIAL_NUMBER, diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 074d32bf0ca..f87fbe722cd 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -23,8 +23,19 @@ from homeassistant.components.fritz.const import ( FRITZ_AUTH_EXCEPTIONS, ) from homeassistant.components.ssdp import ATTR_UPNP_UDN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_SSL, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -34,12 +45,59 @@ from .const import ( MOCK_REQUEST, MOCK_SSDP_DATA, MOCK_USER_DATA, + MOCK_USER_INPUT_ADVANCED, + MOCK_USER_INPUT_SIMPLE, ) from tests.common import MockConfigEntry -async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input", "expected_config"), + [ + ( + True, + MOCK_USER_INPUT_ADVANCED, + { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 1234, + CONF_SSL: False, + }, + ), + ( + False, + MOCK_USER_INPUT_SIMPLE, + { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49000, + CONF_SSL: False, + }, + ), + ( + False, + {**MOCK_USER_INPUT_SIMPLE, CONF_SSL: True}, + { + CONF_HOST: 
"fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49443, + CONF_SSL: True, + }, + ), + ], +) +async def test_user( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input: dict, + expected_config: dict, +) -> None: """Test starting a flow by user.""" with ( patch( @@ -68,18 +126,20 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N mock_request_post.return_value.text = MOCK_REQUEST result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={ + "source": SOURCE_USER, + "show_advanced_options": show_advanced_options, + }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_HOST] == "fake_host" - assert result["data"][CONF_PASSWORD] == "fake_pass" - assert result["data"][CONF_USERNAME] == "fake_user" + assert result["data"] == expected_config assert ( result["options"][CONF_CONSIDER_HOME] == DEFAULT_CONSIDER_HOME.total_seconds() @@ -90,12 +150,20 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N assert mock_setup_entry.called +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) async def test_user_already_configured( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input, ) -> None: """Test starting a flow by user with an already configured device.""" - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config = MockConfigEntry(domain=DOMAIN, data=user_input) mock_config.add_to_hass(hass) 
with ( @@ -124,13 +192,17 @@ async def test_user_already_configured( mock_request_post.return_value.text = MOCK_REQUEST result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={ + "source": SOURCE_USER, + "show_advanced_options": show_advanced_options, + }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=MOCK_USER_INPUT_SIMPLE ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -141,13 +213,22 @@ async def test_user_already_configured( "error", FRITZ_AUTH_EXCEPTIONS, ) +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) async def test_exception_security( - hass: HomeAssistant, mock_get_source_ip, error + hass: HomeAssistant, + mock_get_source_ip, + error, + show_advanced_options: bool, + user_input, ) -> None: """Test starting a flow by user with invalid credentials.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -157,7 +238,7 @@ async def test_exception_security( side_effect=error, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM @@ -165,11 +246,21 @@ async def test_exception_security( assert result["errors"]["base"] == ERROR_AUTH_INVALID -async def test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, 
MOCK_USER_INPUT_SIMPLE)], +) +async def test_exception_connection( + hass: HomeAssistant, + mock_get_source_ip, + show_advanced_options: bool, + user_input, +) -> None: """Test starting a flow by user with a connection error.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -179,7 +270,7 @@ async def test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> side_effect=FritzConnectionException, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM @@ -187,11 +278,18 @@ async def test_exception_connection(hass: HomeAssistant, mock_get_source_ip) -> assert result["errors"]["base"] == ERROR_CANNOT_CONNECT -async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> None: +@pytest.mark.parametrize( + ("show_advanced_options", "user_input"), + [(True, MOCK_USER_INPUT_ADVANCED), (False, MOCK_USER_INPUT_SIMPLE)], +) +async def test_exception_unknown( + hass: HomeAssistant, mock_get_source_ip, show_advanced_options: bool, user_input +) -> None: """Test starting a flow by user with an unknown exception.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} + DOMAIN, + context={"source": SOURCE_USER, "show_advanced_options": show_advanced_options}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -201,7 +299,7 @@ async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> Non side_effect=OSError, ): result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_DATA + result["flow_id"], user_input=user_input ) assert result["type"] is FlowResultType.FORM 
@@ -210,7 +308,9 @@ async def test_exception_unknown(hass: HomeAssistant, mock_get_source_ip) -> Non async def test_reauth_successful( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, ) -> None: """Test starting a reauthentication flow.""" @@ -273,7 +373,11 @@ async def test_reauth_successful( ], ) async def test_reauth_not_successful( - hass: HomeAssistant, fc_class_mock, mock_get_source_ip, side_effect, error + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + side_effect, + error, ) -> None: """Test starting a reauthentication flow but no connection found.""" @@ -306,6 +410,176 @@ async def test_reauth_not_successful( assert result["errors"]["base"] == error +@pytest.mark.parametrize( + ("show_advanced_options", "user_input", "expected_config"), + [ + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 49000, CONF_SSL: False}, + {CONF_HOST: "host_a", CONF_PORT: 49000, CONF_SSL: False}, + ), + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 49443, CONF_SSL: True}, + {CONF_HOST: "host_a", CONF_PORT: 49443, CONF_SSL: True}, + ), + ( + True, + {CONF_HOST: "host_a", CONF_PORT: 12345, CONF_SSL: True}, + {CONF_HOST: "host_a", CONF_PORT: 12345, CONF_SSL: True}, + ), + ( + False, + {CONF_HOST: "host_b", CONF_SSL: False}, + {CONF_HOST: "host_b", CONF_PORT: 49000, CONF_SSL: False}, + ), + ( + False, + {CONF_HOST: "host_b", CONF_SSL: True}, + {CONF_HOST: "host_b", CONF_PORT: 49443, CONF_SSL: True}, + ), + ], +) +async def test_reconfigure_successful( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, + show_advanced_options: bool, + user_input: dict, + expected_config: dict, +) -> None: + """Test starting a reconfigure flow.""" + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.fritz.config_flow.FritzConnection", + side_effect=fc_class_mock, + ), + patch( + 
"homeassistant.components.fritz.common.FritzBoxTools._update_device_info", + return_value=MOCK_FIRMWARE_INFO, + ), + patch( + "homeassistant.components.fritz.async_setup_entry", + ) as mock_setup_entry, + patch( + "requests.get", + ) as mock_request_get, + patch( + "requests.post", + ) as mock_request_post, + ): + mock_request_get.return_value.status_code = 200 + mock_request_get.return_value.content = MOCK_REQUEST + mock_request_post.return_value.status_code = 200 + mock_request_post.return_value.text = MOCK_REQUEST + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "entry_id": mock_config.entry_id, + "show_advanced_options": show_advanced_options, + }, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data == { + **expected_config, + CONF_USERNAME: "fake_user", + CONF_PASSWORD: "fake_pass", + } + + assert mock_setup_entry.called + + +async def test_reconfigure_not_successful( + hass: HomeAssistant, + fc_class_mock, + mock_get_source_ip, +) -> None: + """Test starting a reconfigure flow but no connection found.""" + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.fritz.config_flow.FritzConnection", + side_effect=[FritzConnectionException, fc_class_mock], + ), + patch( + "homeassistant.components.fritz.common.FritzBoxTools._update_device_info", + return_value=MOCK_FIRMWARE_INFO, + ), + patch( + "homeassistant.components.fritz.async_setup_entry", + ), + patch( + "requests.get", + ) as mock_request_get, + patch( + "requests.post", + ) as mock_request_post, + ): + 
mock_request_get.return_value.status_code = 200 + mock_request_get.return_value.content = MOCK_REQUEST + mock_request_post.return_value.status_code = 200 + mock_request_post.return_value.text = MOCK_REQUEST + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "fake_host", + CONF_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"]["base"] == ERROR_CANNOT_CONNECT + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "fake_host", + CONF_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data == { + CONF_HOST: "fake_host", + CONF_PASSWORD: "fake_pass", + CONF_USERNAME: "fake_user", + CONF_PORT: 49000, + CONF_SSL: False, + } + + async def test_ssdp_already_configured( hass: HomeAssistant, fc_class_mock, mock_get_source_ip ) -> None: diff --git a/tests/components/fritz/test_switch.py b/tests/components/fritz/test_switch.py index adb5c3f6799..b82587d42bd 100644 --- a/tests/components/fritz/test_switch.py +++ b/tests/components/fritz/test_switch.py @@ -15,6 +15,8 @@ from tests.common import MockConfigEntry MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -34,9 +36,11 @@ MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + 
"GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -56,11 +60,13 @@ MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -80,9 +86,11 @@ MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + "GetSSID": {"NewSSID": "WiFi2"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -102,11 +110,13 @@ MOCK_WLANCONFIGS_DIFF_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "WLANConfiguration1": { + "GetSSID": {"NewSSID": "WiFi"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -126,9 +136,11 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, "WLANConfiguration2": { + "GetSSID": {"NewSSID": "WiFi+"}, + "GetSecurityKeys": {"NewKeyPassphrase": "mysecret"}, "GetInfo": { "NewEnable": True, "NewStatus": "Up", @@ -148,7 +160,7 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { "NewMinCharsPSK": 64, "NewMaxCharsPSK": 64, "NewAllowedCharsPSK": "0123456789ABCDEFabcdef", - } + }, }, } @@ -179,7 +191,7 @@ async def test_switch_setup( entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert 
entry.state is ConfigEntryState.LOADED switches = hass.states.async_all(Platform.SWITCH) diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 53a4f1c5205..72d36a8ab63 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,7 +12,12 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_SSDP, + SOURCE_USER, +) from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -202,6 +207,80 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: assert result["reason"] == "no_devices_found" +async def test_reconfigure_success(hass: HomeAssistant, fritz: Mock) -> None: + """Test starting a reconfigure flow.""" + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + assert mock_config.data[CONF_HOST] == "10.0.0.1" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"reconfigure_successful" + assert mock_config.data[CONF_HOST] == "new_host" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + +async def test_reconfigure_failed(hass: HomeAssistant, fritz: Mock) -> None: + """Test starting a reconfigure flow with failure.""" + fritz().login.side_effect = [OSError("Boom"), None] + + mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + mock_config.add_to_hass(hass) + + assert mock_config.data[CONF_HOST] == "10.0.0.1" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config.entry_id}, + data=mock_config.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"]["base"] == "no_devices_found" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "new_host", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config.data[CONF_HOST] == "new_host" + assert mock_config.data[CONF_USERNAME] == "fake_user" + assert mock_config.data[CONF_PASSWORD] == "fake_pass" + + @pytest.mark.parametrize( ("test_data", "expected_result"), [ diff --git a/tests/components/fritzbox/test_coordinator.py b/tests/components/fritzbox/test_coordinator.py new file mode 100644 index 00000000000..401fab8f169 --- /dev/null +++ b/tests/components/fritzbox/test_coordinator.py @@ -0,0 +1,111 @@ +"""Tests for the AVM Fritz!Box integration.""" + +from __future__ import annotations + 
+from datetime import timedelta +from unittest.mock import Mock + +from pyfritzhome import LoginError +from requests.exceptions import ConnectionError, HTTPError + +from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_DEVICES +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.dt import utcnow + +from . import FritzDeviceCoverMock, FritzDeviceSwitchMock +from .const import MOCK_CONFIG + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_coordinator_update_after_reboot( + hass: HomeAssistant, fritz: Mock +) -> None: + """Test coordinator after reboot.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = [HTTPError(), ""] + + assert await hass.config_entries.async_setup(entry.entry_id) + assert fritz().update_devices.call_count == 2 + assert fritz().update_templates.call_count == 1 + assert fritz().get_devices.call_count == 1 + assert fritz().get_templates.call_count == 1 + assert fritz().login.call_count == 2 + + +async def test_coordinator_update_after_password_change( + hass: HomeAssistant, fritz: Mock +) -> None: + """Test coordinator after password change.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = HTTPError() + fritz().login.side_effect = ["", LoginError("some_user")] + + assert not await hass.config_entries.async_setup(entry.entry_id) + assert fritz().update_devices.call_count == 1 + assert fritz().get_devices.call_count == 0 + assert fritz().get_templates.call_count == 0 + assert fritz().login.call_count == 2 + + +async def 
test_coordinator_update_when_unreachable( + hass: HomeAssistant, fritz: Mock +) -> None: + """Test coordinator after reboot.""" + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + fritz().update_devices.side_effect = [ConnectionError(), ""] + + assert not await hass.config_entries.async_setup(entry.entry_id) + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_automatic_registry_cleanup( + hass: HomeAssistant, + fritz: Mock, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test automatic registry cleanup.""" + fritz().get_devices.return_value = [ + FritzDeviceSwitchMock(ain="fake ain switch", name="fake_switch"), + FritzDeviceCoverMock(ain="fake ain cover", name="fake_cover"), + ] + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id="any", + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 11 + assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 2 + + fritz().get_devices.return_value = [ + FritzDeviceSwitchMock(ain="fake ain switch", name="fake_switch") + ] + + async_fire_time_changed(hass, utcnow() + timedelta(seconds=35)) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 8 + assert len(dr.async_entries_for_config_entry(device_registry, entry.entry_id)) == 1 diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py index 4ee351f7914..8d7e4249fbd 100644 --- a/tests/components/fritzbox/test_init.py +++ b/tests/components/fritzbox/test_init.py @@ -6,7 +6,7 @@ from unittest.mock import Mock, call, patch 
from pyfritzhome import LoginError import pytest -from requests.exceptions import ConnectionError, HTTPError +from requests.exceptions import ConnectionError as RequestConnectionError from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN @@ -80,6 +80,7 @@ async def test_update_unique_id( new_unique_id: str, ) -> None: """Test unique_id update of integration.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] entry = MockConfigEntry( domain=FB_DOMAIN, data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], @@ -138,6 +139,7 @@ async def test_update_unique_id_no_change( unique_id: str, ) -> None: """Test unique_id is not updated of integration.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] entry = MockConfigEntry( domain=FB_DOMAIN, data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], @@ -158,62 +160,6 @@ async def test_update_unique_id_no_change( assert entity_migrated.unique_id == unique_id -async def test_coordinator_update_after_reboot( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after reboot.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = [HTTPError(), ""] - - assert await hass.config_entries.async_setup(entry.entry_id) - assert fritz().update_devices.call_count == 2 - assert fritz().update_templates.call_count == 1 - assert fritz().get_devices.call_count == 1 - assert fritz().get_templates.call_count == 1 - assert fritz().login.call_count == 2 - - -async def test_coordinator_update_after_password_change( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after password change.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = 
HTTPError() - fritz().login.side_effect = ["", LoginError("some_user")] - - assert not await hass.config_entries.async_setup(entry.entry_id) - assert fritz().update_devices.call_count == 1 - assert fritz().get_devices.call_count == 0 - assert fritz().get_templates.call_count == 0 - assert fritz().login.call_count == 2 - - -async def test_coordinator_update_when_unreachable( - hass: HomeAssistant, fritz: Mock -) -> None: - """Test coordinator after reboot.""" - entry = MockConfigEntry( - domain=FB_DOMAIN, - data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], - unique_id="any", - ) - entry.add_to_hass(hass) - fritz().update_devices.side_effect = [ConnectionError(), ""] - - assert not await hass.config_entries.async_setup(entry.entry_id) - assert entry.state is ConfigEntryState.SETUP_RETRY - - async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None: """Test unload and remove of integration.""" fritz().get_devices.return_value = [FritzDeviceSwitchMock()] @@ -325,7 +271,7 @@ async def test_raise_config_entry_not_ready_when_offline(hass: HomeAssistant) -> entry.add_to_hass(hass) with patch( "homeassistant.components.fritzbox.Fritzhome.login", - side_effect=ConnectionError(), + side_effect=RequestConnectionError(), ) as mock_login: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/gios/snapshots/test_sensor.ambr b/tests/components/gios/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c67cc3e4d7c --- /dev/null +++ b/tests/components/gios/snapshots/test_sensor.ambr @@ -0,0 +1,774 @@ +# serializer version: 1 +# name: test_sensor[sensor.home_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.home_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aqi', + 'unique_id': '123-aqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Air quality index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_benzene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_benzene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Benzene', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'c6h6', + 'unique_id': '123-c6h6', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_benzene-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'friendly_name': 'Home Benzene', + 'state_class': , + 
'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_benzene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23789', + }) +# --- +# name: test_sensor[sensor.home_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co', + 'unique_id': '123-co', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'friendly_name': 'Home Carbon monoxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '251.874', + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_nitrogen_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-no2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'nitrogen_dioxide', + 'friendly_name': 'Home Nitrogen dioxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_nitrogen_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.13411', + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_nitrogen_dioxide_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Nitrogen dioxide index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'no2_index', + 'unique_id': '123-no2-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_nitrogen_dioxide_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Nitrogen dioxide index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 
'sensor.home_nitrogen_dioxide_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_ozone-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_ozone', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ozone', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-o3', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_ozone-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'ozone', + 'friendly_name': 'Home Ozone', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_ozone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.7768', + }) +# --- +# name: test_sensor[sensor.home_ozone_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_ozone_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ozone 
index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'o3_index', + 'unique_id': '123-o3-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_ozone_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Ozone index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_ozone_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-pm10', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'pm10', + 'friendly_name': 'Home PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16.8344', + }) +# --- +# name: test_sensor[sensor.home_pm10_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm10_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10 index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm10_index', + 'unique_id': '123-pm10-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_pm10_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home PM10 index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_pm10_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-pm25', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# 
name: test_sensor[sensor.home_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'pm25', + 'friendly_name': 'Home PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensor[sensor.home_pm2_5_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pm2_5_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5 index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_index', + 'unique_id': '123-pm25-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_pm2_5_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home PM2.5 index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_pm2_5_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_sulphur_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sulphur dioxide', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123-so2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by GIOŚ', + 'device_class': 'sulphur_dioxide', + 'friendly_name': 'Home Sulphur dioxide', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.home_sulphur_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.35478', + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_sulphur_dioxide_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sulphur dioxide index', + 'platform': 'gios', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'so2_index', + 'unique_id': '123-so2-index', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.home_sulphur_dioxide_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'attribution': 'Data provided by GIOŚ', + 'device_class': 'enum', + 'friendly_name': 'Home Sulphur dioxide index', + 'options': list([ + 'very_bad', + 'bad', + 'sufficient', + 'moderate', + 'good', + 'very_good', + ]), + }), + 'context': , + 'entity_id': 'sensor.home_sulphur_dioxide_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_good', + }) +# --- diff --git a/tests/components/gios/test_sensor.py b/tests/components/gios/test_sensor.py index 60e8722ba24..b24d88ccb8d 100644 --- a/tests/components/gios/test_sensor.py +++ b/tests/components/gios/test_sensor.py @@ -6,249 +6,28 @@ import json from unittest.mock import patch from gios import ApiError +from syrupy import SnapshotAssertion -from homeassistant.components.gios.const import ATTRIBUTION, DOMAIN -from homeassistant.components.sensor import ( - ATTR_OPTIONS, - ATTR_STATE_CLASS, - DOMAIN as PLATFORM, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.const import ( - ATTR_ATTRIBUTION, - ATTR_DEVICE_CLASS, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - STATE_UNAVAILABLE, -) +from homeassistant.components.gios.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as PLATFORM +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow from . 
import init_integration -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_fixture, snapshot_platform -async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the sensor.""" - await init_integration(hass) + with patch("homeassistant.components.gios.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - state = hass.states.get("sensor.home_benzene") - assert state - assert state.state == "0.23789" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get("sensor.home_benzene") - assert entry - assert entry.unique_id == "123-c6h6" - - state = hass.states.get("sensor.home_carbon_monoxide") - assert state - assert state.state == "251.874" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_carbon_monoxide") - assert entry - assert entry.unique_id == "123-co" - - state = hass.states.get("sensor.home_nitrogen_dioxide") - assert state - assert state.state == "7.13411" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.NITROGEN_DIOXIDE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_nitrogen_dioxide") - assert entry - assert entry.unique_id == "123-no2" - - state = hass.states.get("sensor.home_nitrogen_dioxide_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_nitrogen_dioxide_index") - assert entry - assert entry.unique_id == "123-no2-index" - - state = hass.states.get("sensor.home_ozone") - assert state - assert state.state == "95.7768" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.OZONE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_ozone") - assert entry - assert entry.unique_id == "123-o3" - - state = hass.states.get("sensor.home_ozone_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_ozone_index") - assert entry - assert entry.unique_id == "123-o3-index" - - state = hass.states.get("sensor.home_pm10") - assert state - assert state.state == "16.8344" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert 
state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_pm10") - assert entry - assert entry.unique_id == "123-pm10" - - state = hass.states.get("sensor.home_pm10_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_pm10_index") - assert entry - assert entry.unique_id == "123-pm10-index" - - state = hass.states.get("sensor.home_pm2_5") - assert state - assert state.state == "4" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_pm2_5") - assert entry - assert entry.unique_id == "123-pm25" - - state = hass.states.get("sensor.home_pm2_5_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_pm2_5_index") - assert entry - assert entry.unique_id == "123-pm25-index" - - state = hass.states.get("sensor.home_sulphur_dioxide") - assert state - assert state.state == "4.35478" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert 
state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.SULPHUR_DIOXIDE - assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.home_sulphur_dioxide") - assert entry - assert entry.unique_id == "123-so2" - - state = hass.states.get("sensor.home_sulphur_dioxide_index") - assert state - assert state.state == "very_good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_sulphur_dioxide_index") - assert entry - assert entry.unique_id == "123-so2-index" - - state = hass.states.get("sensor.home_air_quality_index") - assert state - assert state.state == "good" - assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION - assert state.attributes.get(ATTR_STATE_CLASS) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_bad", - "bad", - "sufficient", - "moderate", - "good", - "very_good", - ] - - entry = entity_registry.async_get("sensor.home_air_quality_index") - assert entry - assert entry.unique_id == "123-aqi" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index 87ddb2ed81d..7db78af6232 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -54,6 +54,7 @@ def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: """Fixture for a config entry.""" return MockConfigEntry( domain=DOMAIN, + unique_id="123", data={ 
"auth_implementation": DOMAIN, "token": token_entry, diff --git a/tests/components/google_tasks/test_config_flow.py b/tests/components/google_tasks/test_config_flow.py index 24801959674..5b2d4f11fee 100644 --- a/tests/components/google_tasks/test_config_flow.py +++ b/tests/components/google_tasks/test_config_flow.py @@ -1,9 +1,11 @@ """Test the Google Tasks config flow.""" -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import Mock, patch from googleapiclient.errors import HttpError from httplib2 import Response +import pytest from homeassistant import config_entries from homeassistant.components.google_tasks.const import ( @@ -15,18 +17,37 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_ID = "1234" CLIENT_SECRET = "5678" +@pytest.fixture +def user_identifier() -> str: + """Return a unique user ID.""" + return "123" + + +@pytest.fixture +def setup_userinfo(user_identifier: str) -> Generator[Mock, None, None]: + """Set up userinfo.""" + with patch("homeassistant.components.google_tasks.config_flow.build") as mock: + mock.return_value.userinfo.return_value.get.return_value.execute.return_value = { + "id": user_identifier, + "name": "Test Name", + } + yield mock + + async def test_full_flow( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -44,7 +65,8 @@ async def test_full_flow( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - 
"&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -63,14 +85,13 @@ async def test_full_flow( }, ) - with ( - patch( - "homeassistant.components.google_tasks.async_setup_entry", return_value=True - ) as mock_setup, - patch("homeassistant.components.google_tasks.config_flow.build"), - ): + with patch( + "homeassistant.components.google_tasks.async_setup_entry", return_value=True + ) as mock_setup: result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == "123" + assert result["result"].title == "Test Name" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1 @@ -78,9 +99,10 @@ async def test_full_flow( async def test_api_not_enabled( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check flow aborts if api is not enabled.""" result = await hass.config_entries.flow.async_init( @@ -98,7 +120,8 @@ async def test_api_not_enabled( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - "&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -137,9 +160,10 @@ async def test_api_not_enabled( async def test_general_exception( hass: HomeAssistant, hass_client_no_auth, - aioclient_mock, + aioclient_mock: AiohttpClientMocker, current_request_with_host, setup_credentials, + setup_userinfo, ) -> None: """Check flow aborts if exception happens.""" result = await hass.config_entries.flow.async_init( @@ -157,7 +181,8 @@ async def 
test_general_exception( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" - "&scope=https://www.googleapis.com/auth/tasks" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -184,3 +209,108 @@ async def test_general_exception( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" + + +@pytest.mark.parametrize( + ("user_identifier", "abort_reason", "resulting_access_token", "starting_unique_id"), + [ + ( + "123", + "reauth_successful", + "updated-access-token", + "123", + ), + ( + "123", + "reauth_successful", + "updated-access-token", + None, + ), + ( + "345", + "wrong_account", + "mock-access", + "123", + ), + ], +) +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth, + aioclient_mock: AiohttpClientMocker, + current_request_with_host, + setup_credentials, + setup_userinfo, + user_identifier: str, + abort_reason: str, + resulting_access_token: str, + starting_unique_id: str | None, +) -> None: + """Test the re-authentication case updates the correct config entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=starting_unique_id, + data={ + "token": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access", + } + }, + ) + config_entry.add_to_hass(hass) + + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + 
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/tasks+" + "https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "updated-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.google_tasks.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] == "abort" + assert result["reason"] == abort_reason + + assert config_entry.unique_id == "123" + assert "token" in config_entry.data + # Verify access token is refreshed + assert config_entry.data["token"]["access_token"] == resulting_access_token + assert config_entry.data["token"]["refresh_token"] == "mock-refresh-token" diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 0abfce87133..1fe0e4a0c36 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -68,7 +68,7 @@ async def test_expired_token_refresh_success( ( time.time() - 3600, http.HTTPStatus.UNAUTHORIZED, - ConfigEntryState.SETUP_RETRY, # Will trigger reauth in the future + ConfigEntryState.SETUP_ERROR, ), ( time.time() - 3600, diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 0f8d487b340..d3f2747933e 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py 
@@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections import OrderedDict from typing import Any from unittest.mock import patch @@ -15,11 +16,15 @@ from homeassistant.const import ( ATTR_ICON, EVENT_HOMEASSISTANT_START, SERVICE_RELOAD, + STATE_CLOSED, STATE_HOME, + STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, + STATE_OPEN, STATE_UNKNOWN, + STATE_UNLOCKED, ) from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import entity_registry as er @@ -603,6 +608,108 @@ async def test_is_on(hass: HomeAssistant) -> None: assert not group.is_on(hass, "non.existing") +@pytest.mark.parametrize( + ( + "domains", + "states_old", + "states_new", + "state_ison_group_old", + "state_ison_group_new", + ), + [ + ( + ("light", "light"), + (STATE_ON, STATE_OFF), + (STATE_OFF, STATE_OFF), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "cover"), + (STATE_OPEN, STATE_CLOSED), + (STATE_CLOSED, STATE_CLOSED), + (STATE_OPEN, True), + (STATE_CLOSED, False), + ), + ( + ("lock", "lock"), + (STATE_UNLOCKED, STATE_LOCKED), + (STATE_LOCKED, STATE_LOCKED), + (STATE_UNLOCKED, True), + (STATE_LOCKED, False), + ), + ( + ("cover", "lock"), + (STATE_OPEN, STATE_LOCKED), + (STATE_CLOSED, STATE_LOCKED), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "lock"), + (STATE_OPEN, STATE_UNLOCKED), + (STATE_CLOSED, STATE_LOCKED), + (STATE_ON, True), + (STATE_OFF, False), + ), + ( + ("cover", "lock", "light"), + (STATE_OPEN, STATE_LOCKED, STATE_ON), + (STATE_CLOSED, STATE_LOCKED, STATE_OFF), + (STATE_ON, True), + (STATE_OFF, False), + ), + ], +) +async def test_is_on_and_state_mixed_domains( + hass: HomeAssistant, + domains: tuple[str, ...], + states_old: tuple[str, ...], + states_new: tuple[str, ...], + state_ison_group_old: tuple[str, bool], + state_ison_group_new: tuple[str, bool], +) -> None: + """Test is_on method with mixed domains.""" + count = len(domains) + entity_ids = [f"{domains[index]}.test_{index}" for index 
in range(count)] + for index in range(count): + hass.states.async_set(entity_ids[index], states_old[index]) + + assert not group.is_on(hass, "group.none") + await asyncio.gather( + *[async_setup_component(hass, domain, {}) for domain in set(domains)] + ) + assert await async_setup_component(hass, "group", {}) + await hass.async_block_till_done() + + test_group = await group.Group.async_create_group( + hass, + "init_group", + created_by_service=True, + entity_ids=entity_ids, + icon=None, + mode=None, + object_id=None, + order=None, + ) + await hass.async_block_till_done() + + # Assert on old state + state = hass.states.get(test_group.entity_id) + assert state is not None + assert state.state == state_ison_group_old[0] + assert group.is_on(hass, test_group.entity_id) == state_ison_group_old[1] + + # Switch and assert on new state + for index in range(count): + hass.states.async_set(entity_ids[index], states_new[index]) + await hass.async_block_till_done() + state = hass.states.get(test_group.entity_id) + assert state is not None + assert state.state == state_ison_group_new[0] + assert group.is_on(hass, test_group.entity_id) == state_ison_group_new[1] + + async def test_reloading_groups(hass: HomeAssistant) -> None: """Test reloading the group config.""" assert await async_setup_component( diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 9a7d8ef110a..4b4592c2104 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -1376,9 +1376,12 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None ] } - with patch( - "homeassistant.components.recorder.history.state_changes_during_period", - _fake_states, + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(start_time), ): await async_setup_component( hass, diff --git 
a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index 9647cef4721..c34e3ebe186 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -1,23 +1,31 @@ """Test the Home Assistant SkyConnect config flow.""" -from collections.abc import Generator -import copy -from unittest.mock import Mock, patch +import asyncio +from collections.abc import Awaitable, Callable +from typing import Any +from unittest.mock import AsyncMock, Mock, call, patch import pytest +from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import homeassistant_sky_connect, usb +from homeassistant.components import usb +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + get_multiprotocol_addon_manager, +) +from homeassistant.components.homeassistant_sky_connect.config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, +) from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.components.zha import ( - CONF_DEVICE_PATH, - DOMAIN as ZHA_DOMAIN, - RadioType, +from homeassistant.components.homeassistant_sky_connect.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, MockModule, mock_integration +from tests.common import MockConfigEntry USB_DATA_SKY = usb.UsbServiceInfo( device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", @@ -38,340 +46,840 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( ) -@pytest.fixture(autouse=True) -def config_flow_handler(hass: 
HomeAssistant) -> Generator[None, None, None]: - """Fixture for a test config flow.""" - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.WaitingAddonManager.async_wait_until_addon_state" - ): - yield +def delayed_side_effect() -> Callable[..., Awaitable[None]]: + """Slows down eager tasks by delaying for an event loop tick.""" + + async def side_effect(*args: Any, **kwargs: Any) -> None: + await asyncio.sleep(0) + + return side_effect @pytest.mark.parametrize( - ("usb_data", "title"), + ("usb_data", "model"), [ (USB_DATA_SKY, "Home Assistant SkyConnect"), (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_config_flow( - usb_data: usb.UsbServiceInfo, title: str, hass: HomeAssistant +async def test_config_flow_zigbee( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the config flow for SkyConnect.""" - with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - expected_data = { - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - } - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == title - assert result["data"] == expected_data - assert result["options"] == {} - assert len(mock_setup_entry.mock_calls) == 1 - - config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == expected_data - assert config_entry.options == {} - assert config_entry.title == title - assert ( - config_entry.unique_id - == f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
"usb"}, data=usb_data ) + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["description_placeholders"]["firmware_type"] == "unknown" + assert result["description_placeholders"]["model"] == model -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_multiple_entries( - usb_data: usb.UsbServiceInfo, title: str, hass: HomeAssistant -) -> None: - """Test multiple entries are allowed.""" - # Setup an existing config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", - ) - config_entry.add_to_hass(hass) - - usb_data = copy.copy(usb_data) - usb_data.serial_number = "bla_serial_number_2" - + # Next, we probe the firmware with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, # Ensure we re-install it ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" - -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_update_device( - usb_data: usb.UsbServiceInfo, title: str, hass: 
HomeAssistant -) -> None: - """Test updating device path.""" - # Setup an existing config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() ) - config_entry.add_to_hass(hass) - - usb_data = copy.copy(usb_data) - usb_data.device = "bla_device_2" - - with patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - assert await hass.config_entries.async_setup(config_entry.entry_id) - assert len(mock_setup_entry.mock_calls) == 1 with ( patch( - "homeassistant.components.homeassistant_sky_connect.async_setup_entry", - return_value=True, - ) as mock_setup_entry, + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), patch( - "homeassistant.components.homeassistant_sky_connect.async_unload_entry", - wraps=homeassistant_sky_connect.async_unload_entry, - ) as mock_unload_entry, + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, ) - await 
hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_unload_entry.mock_calls) == 1 + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.mark.parametrize( - ("usb_data", "title"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant ZBT-1"), - ], -) -async def test_option_flow_install_multi_pan_addon( - usb_data: usb.UsbServiceInfo, - title: str, - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon.""" - assert await async_setup_component(hass, "usb", {}) - mock_integration(hass, MockModule("hassio")) + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] - # Setup the config entry - config_entry = MockConfigEntry( - data={ - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - }, - domain=DOMAIN, - 
options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", - ) - config_entry.add_to_hass(hass) + await hass.async_block_till_done(wait_background_tasks=True) - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_init(config_entry.entry_id) + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" + assert result["step_id"] == "confirm_zigbee" - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - 
"autoflash_firmware": True, - "device": usb_data.device, - "baudrate": "115200", - "flow_control": True, - } - }, - ) - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } -def mock_detect_radio_type(radio_type=RadioType.ezsp, ret=True): - """Mock `detect_radio_type` that just sets the appropriate attributes.""" - - async def detect(self): - self.radio_type = radio_type - self.device_settings = radio_type.controller.SCHEMA_DEVICE( - {CONF_DEVICE_PATH: self.device_path} - ) - - return ret - - return detect + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" @pytest.mark.parametrize( - ("usb_data", "title"), + ("usb_data", "model"), [ (USB_DATA_SKY, "Home Assistant SkyConnect"), (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -@patch( - "homeassistant.components.zha.radio_manager.ZhaRadioManager.detect_radio_type", - mock_detect_radio_type(), -) -async def test_option_flow_install_multi_pan_addon_zha( - usb_data: usb.UsbServiceInfo, - title: str, - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, +async def test_config_flow_zigbee_skip_step_if_installed( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: - """Test installing the multi pan addon 
when a zha config entry exists.""" - assert await async_setup_component(hass, "usb", {}) - mock_integration(hass, MockModule("hassio")) + """Test the config flow for SkyConnect, skip installing the addon if necessary.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) - # Setup the config entry - config_entry = MockConfigEntry( - data={ - "device": usb_data.device, - "vid": usb_data.vid, - "pid": usb_data.pid, - "serial_number": usb_data.serial_number, - "manufacturer": usb_data.manufacturer, - "description": usb_data.description, - }, - domain=DOMAIN, + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert result["description_placeholders"]["firmware_type"] == "unknown" + assert result["description_placeholders"]["model"] == model + + # Next, we probe the firmware + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, # Ensure we re-install it + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + 
mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Pick the menu option: we skip installation, instead we directly run it + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + # Uninstall the addon + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Done + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the config flow for SkyConnect.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + # First step is confirmation, we haven't probed the firmware yet + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + assert 
result["description_placeholders"]["firmware_type"] == "unknown" + assert result["description_placeholders"]["model"] == model + + # Next, we probe the firmware + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + + # Set up Thread firmware + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, 
+ }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_already_installed( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the Thread config flow for SkyConnect, addon is already installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result 
= await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, options={}, - title=title, - unique_id=f"{usb_data.vid}:{usb_data.pid}_{usb_data.serial_number}_{usb_data.manufacturer}_{usb_data.description}", + state=AddonState.NOT_RUNNING, + update_available=False, + version=None, + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + 
("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_not_hassio( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_zigbee_to_thread( + usb_data: 
usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating Zigbee to Thread.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) - zha_config_entry = MockConfigEntry( - data={"device": {"path": usb_data.device}, "radio_type": "ezsp"}, - domain=ZHA_DOMAIN, - options={}, - title="Yellow", - ) - zha_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == model + + # Pick Thread + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + 
hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "spinel" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) 
+async def test_options_flow_thread_to_zigbee( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow for SkyConnect, migrating Thread to Zigbee.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == model + + # Set up Zigbee firmware + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + + # OTBR is not installed + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + 
return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": usb_data.device, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await 
hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "ezsp" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_SKY, "Home Assistant SkyConnect"), + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_multipan_uninstall( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test options flow for when multi-PAN firmware is installed.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "cpc", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Multi-PAN addon is running + mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) + mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": usb_data.device}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", + return_value=mock_multipan_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + return_value=True, + ), ): result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.FORM - 
assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": usb_data.device, - "baudrate": "115200", - "flow_control": True, - } - }, - ) - # Check the ZHA config entry data is updated - assert zha_config_entry.data == { - "device": { - "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - } - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "addon_menu" + assert "uninstall_addon" in result["menu_options"] diff --git a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py new file mode 100644 index 00000000000..128c812272f --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_config_flow_failures.py @@ -0,0 +1,920 @@ +"""Test the Home Assistant SkyConnect config flow failure cases.""" + +from unittest.mock import AsyncMock, Mock, patch + +import pytest +from universal_silabs_flasher.const import 
ApplicationType + +from homeassistant.components import usb +from homeassistant.components.hassio.addon_manager import ( + AddonError, + AddonInfo, + AddonState, +) +from homeassistant.components.homeassistant_sky_connect.config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, +) +from homeassistant.components.homeassistant_sky_connect.const import DOMAIN +from homeassistant.components.homeassistant_sky_connect.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .test_config_flow import USB_DATA_ZBT1, delayed_side_effect + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_cannot_probe_firmware( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when firmware cannot be probed.""" + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=None, + ): + # Start the flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + # Probing fails + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "unsupported_firmware" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_not_hassio_wrong_firmware( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup but the firmware is bad.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_hassio" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_already_running( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon is already running.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + 
user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_already_running" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.side_effect = AddonError() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_info_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_install_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_addon_set_config_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], user_input={} + ) + + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_set_addon_options = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_set_config_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon fails to run.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + 
mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_zigbee_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon uninstall fails.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.SPINEL, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_flasher_manager = 
Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + # Uninstall failure isn't critical + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_not_hassio( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test when the stick is used with a non-hassio setup and Thread is selected.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_hassio_thread" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_info_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.side_effect = AddonError() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot get addon info + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == 
"addon_info_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_already_running( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when the Thread addon is already running.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "otbr_addon_already_running" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_install_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be installed.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + # Cannot install addon + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_addon_set_config_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon cannot be configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = 
Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_set_addon_options = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_set_config_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_flasher_run_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon fails to run.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread 
Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock(side_effect=AddonError()) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_config_flow_thread_flasher_uninstall_fails( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test failure case when flasher addon uninstall fails.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "usb"}, data=usb_data + ) + + with patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + 
available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ) + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=AddonError() + ) + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + + # Uninstall failure isn't critical + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_zigbee_to_thread_zha_configured( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow migration failure, ZHA using the stick.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + 
version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Set up ZHA as well + zha_config_entry = MockConfigEntry( + domain="zha", + data={"device": {"path": usb_data.device}}, + ) + zha_config_entry.add_to_hass(hass) + + # Confirm options flow + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + # Pick Thread + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "zha_still_using_stick" + + +@pytest.mark.parametrize( + ("usb_data", "model"), + [ + (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), + ], +) +async def test_options_flow_thread_to_zigbee_otbr_configured( + usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant +) -> None: + """Test the options flow migration failure, OTBR still using the stick.""" + config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "spinel", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # Confirm options flow + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + # Pick Zigbee + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": usb_data.device}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + 
"homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "otbr_still_using_stick" diff --git a/tests/components/homeassistant_sky_connect/test_hardware.py b/tests/components/homeassistant_sky_connect/test_hardware.py index 6b283378045..888ed27a3c0 100644 --- a/tests/components/homeassistant_sky_connect/test_hardware.py +++ b/tests/components/homeassistant_sky_connect/test_hardware.py @@ -1,7 +1,5 @@ """Test the Home Assistant SkyConnect hardware platform.""" -from unittest.mock import patch - from homeassistant.components.homeassistant_sky_connect.const import DOMAIN from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant from homeassistant.setup import async_setup_component @@ -15,7 +13,8 @@ CONFIG_ENTRY_DATA = { "pid": "EA60", "serial_number": "9e2adbd75b8beb119fe564a0f320645d", "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", + "product": "SkyConnect v1.0", + "firmware": "ezsp", } CONFIG_ENTRY_DATA_2 = { @@ -24,7 +23,8 @@ CONFIG_ENTRY_DATA_2 = { "pid": "EA60", "serial_number": "9e2adbd75b8beb119fe564a0f320645d", "manufacturer": "Nabu Casa", - "description": "Home Assistant Connect ZBT-1", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", } @@ -42,22 +42,24 @@ async def test_hardware_info( options={}, title="Home Assistant SkyConnect", unique_id="unique_1", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + config_entry_2 = MockConfigEntry( data=CONFIG_ENTRY_DATA_2, domain=DOMAIN, options={}, title="Home Assistant Connect 
ZBT-1", unique_id="unique_2", + version=1, + minor_version=2, ) config_entry_2.add_to_hass(hass) - with patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + + assert await hass.config_entries.async_setup(config_entry_2.entry_id) client = await hass_ws_client(hass) diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index a6dd5100d7e..88b57f2dd64 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -1,377 +1,56 @@ """Test the Home Assistant SkyConnect integration.""" -from collections.abc import Generator -from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import patch -import pytest +from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import zha -from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.components.homeassistant_sky_connect.util import FirmwareGuess +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -CONFIG_ENTRY_DATA = { - "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - "vid": "10C4", - "pid": "EA60", - "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", - "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", -} +async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: + """Test migrating config entries from v1 to v2 format.""" 
-@pytest.fixture(autouse=True) -def disable_usb_probing() -> Generator[None, None, None]: - """Disallow touching of system USB devices during unit tests.""" - with patch("homeassistant.components.usb.comports", return_value=[]): - yield - - -@pytest.fixture -def mock_zha_config_flow_setup() -> Generator[None, None, None]: - """Mock the radio connection and probing of the ZHA config flow.""" - - def mock_probe(config: dict[str, Any]) -> None: - # The radio probing will return the correct baudrate - return {**config, "baudrate": 115200} - - mock_connect_app = MagicMock() - mock_connect_app.__aenter__.return_value.backups.backups = [] - - with ( - patch( - "bellows.zigbee.application.ControllerApplication.probe", - side_effect=mock_probe, - ), - patch( - "homeassistant.components.zha.radio_manager.ZhaRadioManager.connect_zigpy_app", - return_value=mock_connect_app, - ), - ): - yield - - -@pytest.mark.parametrize( - ("onboarded", "num_entries", "num_flows"), [(False, 1, 0), (True, 0, 1)] -) -async def test_setup_entry( - mock_zha_config_flow_setup, - hass: HomeAssistant, - addon_store_info, - onboarded, - num_entries, - num_flows, -) -> None: - """Test setup of a config entry, including setup of zha.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", - return_value=onboarded, - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - matcher = mock_is_plugged_in.mock_calls[0].args[1] - assert 
matcher["vid"].isupper() - assert matcher["pid"].isupper() - assert matcher["serial_number"].islower() - assert matcher["manufacturer"].islower() - assert matcher["description"].islower() - - # Finish setting up ZHA - if num_entries > 0: - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - assert len(hass.config_entries.flow.async_progress_by_handler("zha")) == num_flows - assert len(hass.config_entries.async_entries("zha")) == num_entries - - -async def test_setup_zha( - mock_zha_config_flow_setup, hass: HomeAssistant, addon_store_info -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - # Finish setting up ZHA - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry 
= hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": CONFIG_ENTRY_DATA["device"], + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "description": "SkyConnect v1.0", }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == CONFIG_ENTRY_DATA["description"] - - -async def test_setup_zha_multipan( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_info.return_value["options"]["device"] = CONFIG_ENTRY_DATA["device"] - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", + version=1, ) + config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": 
zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": "socket://core-silabs-multiprotocol:9999", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "SkyConnect Multiprotocol" - - -async def test_setup_zha_multipan_other_device( - mock_zha_config_flow_setup, hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_info.return_value["options"]["device"] = "/dev/not_our_sky_connect" - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ) as mock_is_plugged_in, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - 
"device": { - "baudrate": 115200, - "flow_control": None, - "path": CONFIG_ENTRY_DATA["device"], - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == CONFIG_ENTRY_DATA["description"] - - -async def test_setup_entry_wait_usb(hass: HomeAssistant) -> None: - """Test setup of a config entry when the dongle is not plugged in.""" - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) with patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=False, - ) as mock_is_plugged_in: + "homeassistant.components.homeassistant_sky_connect.guess_firmware_type", + return_value=FirmwareGuess( + is_running=True, + firmware_type=ApplicationType.SPINEL, + source="otbr", + ), + ): await hass.config_entries.async_setup(config_entry.entry_id) - assert config_entry.state is ConfigEntryState.LOADED - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - # USB discovery starts, config entry should be removed - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - await hass.async_block_till_done() - assert len(mock_is_plugged_in.mock_calls) == 1 - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 + assert config_entry.version == 1 + assert config_entry.minor_version == 2 + assert config_entry.data == { + "description": "SkyConnect v1.0", + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", # `description` has been copied to `product` + "firmware": "spinel", # new key + } -async def test_setup_entry_addon_info_fails( - hass: HomeAssistant, addon_store_info -) -> None: - """Test setup of a config entry when fetching addon info fails.""" - assert await 
async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - addon_store_info.side_effect = HassioAPIError("Boom") - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_entry_addon_not_running( - hass: HomeAssistant, addon_installed, start_addon -) -> None: - """Test the addon is started if it is not running.""" - assert await async_setup_component(hass, "usb", {}) - hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) - - # Setup the config entry - config_entry = MockConfigEntry( - data=CONFIG_ENTRY_DATA, - domain=DOMAIN, - options={}, - title="Home Assistant SkyConnect", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.usb.async_is_plugged_in", - return_value=True, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.SETUP_RETRY - start_addon.assert_called_once() + await hass.config_entries.async_unload(config_entry.entry_id) diff --git 
a/tests/components/homeassistant_sky_connect/test_util.py b/tests/components/homeassistant_sky_connect/test_util.py new file mode 100644 index 00000000000..12ba352eb16 --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_util.py @@ -0,0 +1,203 @@ +"""Test SkyConnect utilities.""" + +from unittest.mock import AsyncMock, patch + +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState +from homeassistant.components.homeassistant_sky_connect.const import ( + DOMAIN, + HardwareVariant, +) +from homeassistant.components.homeassistant_sky_connect.util import ( + FirmwareGuess, + get_hardware_variant, + get_usb_service_info, + get_zha_device_path, + guess_firmware_type, +) +from homeassistant.components.usb import UsbServiceInfo +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +SKYCONNECT_CONFIG_ENTRY = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", + "firmware": "ezsp", + }, + version=2, +) + +CONNECT_ZBT1_CONFIG_ENTRY = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", + }, + version=2, +) + +ZHA_CONFIG_ENTRY = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={ + "device": { + "path": 
"/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + }, + version=4, +) + + +def test_get_usb_service_info() -> None: + """Test `get_usb_service_info` conversion.""" + assert get_usb_service_info(SKYCONNECT_CONFIG_ENTRY) == UsbServiceInfo( + device=SKYCONNECT_CONFIG_ENTRY.data["device"], + vid=SKYCONNECT_CONFIG_ENTRY.data["vid"], + pid=SKYCONNECT_CONFIG_ENTRY.data["pid"], + serial_number=SKYCONNECT_CONFIG_ENTRY.data["serial_number"], + manufacturer=SKYCONNECT_CONFIG_ENTRY.data["manufacturer"], + description=SKYCONNECT_CONFIG_ENTRY.data["product"], + ) + + +def test_get_hardware_variant() -> None: + """Test `get_hardware_variant` extraction.""" + assert get_hardware_variant(SKYCONNECT_CONFIG_ENTRY) == HardwareVariant.SKYCONNECT + assert ( + get_hardware_variant(CONNECT_ZBT1_CONFIG_ENTRY) == HardwareVariant.CONNECT_ZBT1 + ) + + +def test_get_zha_device_path() -> None: + """Test extracting the ZHA device path from its config entry.""" + assert ( + get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] + ) + + +async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: + """Test guessing the firmware type.""" + + assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + +async def test_guess_firmware_type(hass: HomeAssistant) -> None: + """Test guessing the firmware.""" + path = ZHA_CONFIG_ENTRY.data["device"]["path"] + + ZHA_CONFIG_ENTRY.add_to_hass(hass) + + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="zha" + ) + + # When ZHA is running, we indicate as such when guessing + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) + assert (await 
guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager = AsyncMock() + mock_multipan_addon_manager = AsyncMock() + + with ( + patch( + "homeassistant.components.homeassistant_sky_connect.util.is_hassio", + return_value=True, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_otbr_addon_manager", + return_value=mock_otbr_addon_manager, + ), + patch( + "homeassistant.components.homeassistant_sky_connect.util.get_multiprotocol_addon_manager", + return_value=mock_multipan_addon_manager, + ), + ): + mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() + mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() + + # Hassio errors are ignored and we still go with ZHA + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.side_effect = None + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": "/some/other/device"}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will prefer ZHA, as it is running (and actually pointing to the device) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will still prefer ZHA, as it is the one actually running + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + 
available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Finally, ZHA loses out to OTBR + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" + ) + + mock_multipan_addon_manager.async_get_addon_info.side_effect = None + mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Which will lose out to multi-PAN + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" + ) diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index 83b5f8993bc..922601ca733 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -4791,6 +4791,59 @@ "type": "HEATING_THERMOSTAT", "updateState": "UP_TO_DATE" }, + "3014F71100000000ETRV0013": { + "automaticValveAdaptionNeeded": false, + "availableFirmwareVersion": "2.0.2", + "connectionType": "HMIP_RF", + "firmwareVersion": "2.0.2", + "firmwareVersionInteger": 131074, + "functionalChannels": { + "0": { + "configPending": false, + "deviceId": "3014F71100000000ETRV0013", + "dutyCycle": false, + "functionalChannelType": "DEVICE_OPERATIONLOCK", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000014"], + "index": 0, + "label": "", + "lowBat": false, + "operationLockActive": false, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -58, + "rssiPeerValue": -58, + "unreach": false, + "supportedOptionalFeatures": {} + }, + "1": { + "deviceId": "3014F71100000000ETRV0013", + 
"functionalChannelType": "HEATING_THERMOSTAT_CHANNEL", + "groupIndex": 1, + "groups": ["00000000-0000-0000-0005-000000000019"], + "index": 1, + "label": "", + "valveActualTemperature": 20.0, + "setPointTemperature": 5.0, + "temperatureOffset": 0.0, + "valvePosition": 0.0, + "valveState": "ADAPTION_DONE" + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F71100000000ETRV0013", + "label": "Heizkörperthermostat4", + "lastStatusUpdate": 1524514007132, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manufacturerCode": 1, + "modelId": 269, + "modelType": "HMIP-eTRV", + "oem": "eQ-3", + "permanentlyReachable": true, + "serializedGlobalTradeItemNumber": "3014F71100000000ETRV0013", + "type": "HEATING_THERMOSTAT", + "updateState": "UP_TO_DATE" + }, "3014F7110000000000000014": { "automaticValveAdaptionNeeded": false, "availableFirmwareVersion": "2.0.2", @@ -8535,6 +8588,297 @@ "windowOpenTemperature": 5.0, "windowState": null }, + "00000000-0000-0000-0005-000000000019": { + "activeProfile": "PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F71100000000ETRV0013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + "ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + "humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0000-0005-000000000019", + "label": "Vorzimmer3", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": 
"00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_1", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_2", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000059", + "visible": true + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_5", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0000-0005-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": "HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, + "00000000-0000-0000-0001-000000000019": { + "activeProfile": "PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F7110000000000000013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + 
"ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + "humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0000-0001-000000000019", + "label": "Vorzimmer", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": "00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_1", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_2", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000059", + "visible": false + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_5", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0000-0001-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0000-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": 
"HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, + "00000000-0000-0001-0001-000000000019": { + "activeProfile": "PROFILE_1", + "actualTemperature": null, + "boostDuration": 15, + "boostMode": false, + "channels": [ + { + "channelIndex": 1, + "deviceId": "3014F7110000000000000013" + } + ], + "controlMode": "AUTOMATIC", + "controllable": true, + "cooling": null, + "coolingAllowed": false, + "coolingIgnored": false, + "dutyCycle": false, + "ecoAllowed": true, + "ecoIgnored": false, + "externalClockCoolingTemperature": 23.0, + "externalClockEnabled": false, + "externalClockHeatingTemperature": 19.0, + "floorHeatingMode": "FLOOR_HEATING_STANDARD", + "homeId": "00000000-0000-0000-0000-000000000001", + "humidity": null, + "humidityLimitEnabled": true, + "humidityLimitValue": 60, + "id": "00000000-0000-0001-0001-000000000019", + "label": "Vorzimmer2", + "lastSetPointReachedTimestamp": 1557767559939, + "lastSetPointUpdatedTimestamp": 1557767559939, + "lastStatusUpdate": 1524514007132, + "lowBat": false, + "maxTemperature": 30.0, + "metaGroupId": "00000000-0000-0000-0000-000000000014", + "minTemperature": 5.0, + "partyMode": false, + "profiles": { + "PROFILE_1": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_1", + "name": "Testprofile", + "profileId": "00000000-0000-0000-0001-000000000058", + "visible": true + }, + "PROFILE_2": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_2", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000059", + "visible": true + }, + "PROFILE_3": { + "enabled": true, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_3", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000060", + "visible": false + }, + "PROFILE_4": { + "enabled": false, + 
"groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_4", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000061", + "visible": true + }, + "PROFILE_5": { + "enabled": false, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_5", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000062", + "visible": false + }, + "PROFILE_6": { + "enabled": false, + "groupId": "00000000-0000-0001-0001-000000000019", + "index": "PROFILE_6", + "name": "", + "profileId": "00000000-0000-0000-0001-000000000063", + "visible": false + } + }, + "setPointTemperature": 5.0, + "type": "HEATING", + "unreach": false, + "valvePosition": 0.0, + "valveSilentModeEnabled": false, + "valveSilentModeSupported": false, + "heatingFailureSupported": true, + "windowOpenTemperature": 5.0, + "windowState": null + }, "00000000-AAAA-0000-0000-000000000001": { "actualTemperature": 15.4, "channels": [ diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index 9ede89859dc..f175e2060df 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -1,6 +1,7 @@ """Tests for HomematicIP Cloud climate.""" import datetime +from unittest.mock import patch from homematicip.base.enums import AbsenceType from homematicip.functionalHomes import IndoorClimateHome @@ -15,7 +16,6 @@ from homeassistant.components.climate import ( PRESET_AWAY, PRESET_BOOST, PRESET_ECO, - PRESET_NONE, HVACAction, HVACMode, ) @@ -217,12 +217,14 @@ async def test_hmip_heating_group_heat( ha_state = hass.states.get(entity_id) assert ha_state.state == HVACMode.AUTO + # hvac mode "dry" is not available. expect a valueerror. 
await hass.services.async_call( "climate", "set_hvac_mode", {"entity_id": entity_id, "hvac_mode": "dry"}, blocking=True, ) + assert len(hmip_device.mock_calls) == service_call_counter + 24 # Only fire event from last async_manipulate_test_data available. assert hmip_device.mock_calls[-1][0] == "fire_update_event" @@ -429,14 +431,95 @@ async def test_hmip_heating_group_heat_with_radiator( assert ha_state.attributes["min_temp"] == 5.0 assert ha_state.attributes["max_temp"] == 30.0 assert ha_state.attributes["temperature"] == 5.0 - assert ha_state.attributes[ATTR_PRESET_MODE] is None + assert ha_state.attributes[ATTR_PRESET_MODE] == "Default" assert ha_state.attributes[ATTR_PRESET_MODES] == [ - PRESET_NONE, PRESET_BOOST, PRESET_ECO, + "Default", ] +async def test_hmip_heating_profile_default_name( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test visible profile 1 without a name should be displayed as 'Default'.""" + entity_id = "climate.vorzimmer3" + entity_name = "Vorzimmer3" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat4"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert hmip_device + assert ha_state.state == HVACMode.AUTO + assert ha_state.attributes[ATTR_PRESET_MODES] == [ + PRESET_BOOST, + PRESET_ECO, + "Default", + "Alternative 1", + ] + + +async def test_hmip_heating_profile_naming( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test Heating Profile Naming.""" + entity_id = "climate.vorzimmer2" + entity_name = "Vorzimmer2" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat2"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert hmip_device + assert ha_state.state 
== HVACMode.AUTO + assert ha_state.attributes[ATTR_PRESET_MODES] == [ + PRESET_BOOST, + PRESET_ECO, + "Testprofile", + "Alternative 1", + ] + + +async def test_hmip_heating_profile_name_not_in_list( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test set profile when profile is not in available profiles.""" + expected_profile = "Testprofile" + entity_id = "climate.vorzimmer2" + entity_name = "Vorzimmer2" + device_model = None + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["Heizkörperthermostat2"], + test_groups=[entity_name], + ) + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + with patch( + "homeassistant.components.homematicip_cloud.climate.NICE_PROFILE_NAMES", + return_value={}, + ): + await hass.services.async_call( + "climate", + "set_preset_mode", + {"entity_id": entity_id, "preset_mode": expected_profile}, + blocking=True, + ) + + ha_state = hass.states.get(entity_id) + assert ha_state.attributes[ATTR_PRESET_MODE] == expected_profile + + async def test_hmip_climate_services( hass: HomeAssistant, mock_hap_with_service ) -> None: diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 9fc1f518c64..fb7fe7d7deb 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -26,7 +26,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 272 + assert len(mock_hap.hmip_device_by_entity_id) == 278 async def test_hmip_remove_device( diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index 6763708cc38..ec14b38cd69 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -2,7 +2,7 @@ from http import HTTPStatus import json -from unittest.mock import MagicMock, 
mock_open, patch +from unittest.mock import mock_open, patch from aiohttp.hdrs import AUTHORIZATION @@ -83,166 +83,174 @@ async def mock_client(hass, hass_client, registrations=None): return await hass_client() -class TestHtml5Notify: - """Tests for HTML5 notify platform.""" +async def test_get_service_with_no_json(hass: HomeAssistant): + """Test empty json file.""" + await async_setup_component(hass, "http", {}) + m = mock_open() + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) - def test_get_service_with_no_json(self): - """Test empty json file.""" - hass = MagicMock() + assert service is not None - m = mock_open() - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) - assert service is not None +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_dismissing_message(mock_wp, hass: HomeAssistant): + """Test dismissing message.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - @patch("homeassistant.components.html5.notify.WebPusher") - def test_dismissing_message(self, mock_wp): - """Test dismissing message.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + data = {"device": SUBSCRIPTION_1} - data = {"device": SUBSCRIPTION_1} + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + assert service is not None - assert service is not None + await service.async_dismiss(target=["device", "non_existing"], data={"tag": "test"}) - service.dismiss(target=["device", "non_existing"], data={"tag": "test"}) + assert len(mock_wp.mock_calls) == 4 - assert len(mock_wp.mock_calls) == 4 + # 
WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] + # Call to send + payload = json.loads(mock_wp.mock_calls[3][2]["data"]) - # Call to send - payload = json.loads(mock_wp.mock_calls[3][2]["data"]) + assert payload["dismiss"] is True + assert payload["tag"] == "test" - assert payload["dismiss"] is True - assert payload["tag"] == "test" - @patch("homeassistant.components.html5.notify.WebPusher") - def test_sending_message(self, mock_wp): - """Test sending message.""" - hass = MagicMock() - mock_wp().send().status_code = 201 +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_sending_message(mock_wp, hass: HomeAssistant): + """Test sending message.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - data = {"device": SUBSCRIPTION_1} + data = {"device": SUBSCRIPTION_1} - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - assert service is not None + assert service is not None - service.send_message( - "Hello", target=["device", "non_existing"], data={"icon": "beer.png"} - ) + await service.async_send_message( + "Hello", target=["device", "non_existing"], data={"icon": "beer.png"} + ) - assert len(mock_wp.mock_calls) == 4 + assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_1["subscription"] - # Call to send - payload = json.loads(mock_wp.mock_calls[3][2]["data"]) + # Call to send + payload = 
json.loads(mock_wp.mock_calls[3][2]["data"]) - assert payload["body"] == "Hello" - assert payload["icon"] == "beer.png" + assert payload["body"] == "Hello" + assert payload["icon"] == "beer.png" - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_key_include(self, mock_wp): - """Test if the FCM header is included.""" - hass = MagicMock() - mock_wp().send().status_code = 201 - data = {"chrome": SUBSCRIPTION_5} +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_key_include(mock_wp, hass: HomeAssistant): + """Test if the FCM header is included.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + data = {"chrome": SUBSCRIPTION_5} - assert service is not None + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - service.send_message("Hello", target=["chrome"]) + assert service is not None - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + await service.async_send_message("Hello", target=["chrome"]) - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["Authorization"] is not None + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_send_with_unknown_priority(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["Authorization"] is not 
None - data = {"chrome": SUBSCRIPTION_5} - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_send_with_unknown_priority(mock_wp, hass: HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - assert service is not None + data = {"chrome": SUBSCRIPTION_5} - service.send_message("Hello", target=["chrome"], priority="undefined") + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + assert service is not None - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + await service.async_send_message("Hello", target=["chrome"], priority="undefined") - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_no_targets(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - data = {"chrome": SUBSCRIPTION_5} + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) - assert service is not None +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_no_targets(mock_wp, hass: 
HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - service.send_message("Hello") + data = {"chrome": SUBSCRIPTION_5} - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass - # Get the keys passed to the WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + assert service is not None - @patch("homeassistant.components.html5.notify.WebPusher") - def test_fcm_additional_data(self, mock_wp): - """Test if the gcm_key is only included for GCM endpoints.""" - hass = MagicMock() - mock_wp().send().status_code = 201 + await service.async_send_message("Hello") - data = {"chrome": SUBSCRIPTION_5} + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] - m = mock_open(read_data=json.dumps(data)) - with patch("homeassistant.util.json.open", m, create=True): - service = html5.get_service(hass, VAPID_CONF) + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" - assert service is not None - service.send_message("Hello", data={"mykey": "myvalue"}) +@patch("homeassistant.components.html5.notify.WebPusher") +async def test_fcm_additional_data(mock_wp, hass: HomeAssistant): + """Test if the gcm_key is only included for GCM endpoints.""" + await async_setup_component(hass, "http", {}) + mock_wp().send().status_code = 201 - assert len(mock_wp.mock_calls) == 4 - # WebPusher constructor - assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + data = {"chrome": SUBSCRIPTION_5} - # Get the keys passed to the 
WebPusher's send method - assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" + m = mock_open(read_data=json.dumps(data)) + with patch("homeassistant.util.json.open", m, create=True): + service = await html5.async_get_service(hass, VAPID_CONF) + service.hass = hass + + assert service is not None + + await service.async_send_message("Hello", data={"mykey": "myvalue"}) + + assert len(mock_wp.mock_calls) == 4 + # WebPusher constructor + assert mock_wp.mock_calls[2][1][0] == SUBSCRIPTION_5["subscription"] + + # Get the keys passed to the WebPusher's send method + assert mock_wp.mock_calls[3][2]["headers"]["priority"] == "normal" async def test_registering_new_device_view( diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index f0f87e58173..afff8294f0c 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -30,7 +30,7 @@ from homeassistant.components.http.auth import ( DATA_SIGN_SECRET, SIGN_QUERY_PARAM, STORAGE_KEY, - STRICT_CONNECTION_STATIC_PAGE, + STRICT_CONNECTION_GUARD_PAGE, async_setup_auth, async_sign_path, async_user_not_allowed_do_auth, @@ -879,17 +879,17 @@ async def _drop_connection_unauthorized_request( await client.get("/") -async def _static_page_unauthorized_request( +async def _guard_page_unauthorized_request( hass: HomeAssistant, client: TestClient ) -> None: req = await client.get("/") assert req.status == HTTPStatus.IM_A_TEAPOT - def read_static_page() -> str: - with open(STRICT_CONNECTION_STATIC_PAGE, encoding="utf-8") as file: + def read_guard_page() -> str: + with open(STRICT_CONNECTION_GUARD_PAGE, encoding="utf-8") as file: return file.read() - assert await req.text() == await hass.async_add_executor_job(read_static_page) + assert await req.text() == await hass.async_add_executor_job(read_guard_page) @pytest.mark.parametrize( @@ -909,7 +909,7 @@ async def _static_page_unauthorized_request( ("strict_connection_mode", "request_func"), [ 
(StrictConnectionMode.DROP_CONNECTION, _drop_connection_unauthorized_request), - (StrictConnectionMode.STATIC_PAGE, _static_page_unauthorized_request), + (StrictConnectionMode.GUARD_PAGE, _guard_page_unauthorized_request), ], ids=["drop connection", "static page"], ) diff --git a/tests/components/http/test_init.py b/tests/components/http/test_init.py index b84da595ab1..b554737e7b3 100644 --- a/tests/components/http/test_init.py +++ b/tests/components/http/test_init.py @@ -548,7 +548,7 @@ async def test_service_create_temporary_strict_connection_url_strict_connection_ ("mode"), [ StrictConnectionMode.DROP_CONNECTION, - StrictConnectionMode.STATIC_PAGE, + StrictConnectionMode.GUARD_PAGE, ], ) async def test_service_create_temporary_strict_connection( diff --git a/tests/components/hue/conftest.py b/tests/components/hue/conftest.py index f87faf6294b..ac827d42d95 100644 --- a/tests/components/hue/conftest.py +++ b/tests/components/hue/conftest.py @@ -136,7 +136,7 @@ def create_mock_api_v1(hass): return api -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def v2_resources_test_data(): """Load V2 resources mock data.""" return json.loads(load_fixture("hue/v2_resources.json")) diff --git a/tests/components/hue/test_config_flow.py b/tests/components/hue/test_config_flow.py index 325c32227e3..692bd1405cf 100644 --- a/tests/components/hue/test_config_flow.py +++ b/tests/components/hue/test_config_flow.py @@ -36,7 +36,10 @@ def create_mock_api_discovery(aioclient_mock, bridges): """Patch aiohttp responses with fake data for bridge discovery.""" aioclient_mock.get( URL_NUPNP, - json=[{"internalipaddress": host, "id": id} for (host, id) in bridges], + json=[ + {"internalipaddress": host, "id": bridge_id} + for (host, bridge_id) in bridges + ], ) for host, bridge_id in bridges: aioclient_mock.get( diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 
aea65005fc4..bdbc0a60490 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -50,12 +50,14 @@ 'activity': 'PARKED_IN_CS', 'error_code': 0, 'error_datetime': None, + 'error_datetime_naive': None, 'error_key': None, 'mode': 'MAIN_AREA', 'state': 'RESTRICTED', }), 'planner': dict({ - 'next_start_datetime': '2023-06-05T19:00:00', + 'next_start_datetime': '2023-06-05T19:00:00+00:00', + 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'NOT_ACTIVE', }), @@ -89,7 +91,7 @@ 'work_areas': dict({ '0': dict({ 'cutting_height': 50, - 'name': None, + 'name': 'my_lawn', }), '123456': dict({ 'cutting_height': 50, diff --git a/tests/components/husqvarna_automower/snapshots/test_number.ambr b/tests/components/husqvarna_automower/snapshots/test_number.ambr new file mode 100644 index 00000000000..a5479345bd1 --- /dev/null +++ b/tests/components/husqvarna_automower/snapshots/test_number.ambr @@ -0,0 +1,56 @@ +# serializer version: 1 +# name: test_snapshot_number[number.test_mower_1_cutting_height-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 9, + 'min': 1, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.test_mower_1_cutting_height', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cutting height', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutting_height', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_cutting_height', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_snapshot_number[number.test_mower_1_cutting_height-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Mower 1 Cutting height', + 'max': 9, + 'min': 1, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.test_mower_1_cutting_height', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index fda9c900240..7d4533afe72 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -548,7 +548,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-06T02:00:00+00:00', + 'state': '2023-06-05T19:00:00+00:00', }) # --- # name: test_sensor[sensor.test_mower_1_number_of_charging_cycles-entry] diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index 144dc734025..5500b547853 100644 --- a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -20,6 +20,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -71,13 +72,6 @@ async def test_snapshot_binary_sensor( [Platform.BINARY_SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git 
a/tests/components/husqvarna_automower/test_device_tracker.py b/tests/components/husqvarna_automower/test_device_tracker.py index d9cab0d5074..015be201ccc 100644 --- a/tests/components/husqvarna_automower/test_device_tracker.py +++ b/tests/components/husqvarna_automower/test_device_tracker.py @@ -10,7 +10,7 @@ from homeassistant.helpers import entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform async def test_device_tracker_snapshot( @@ -26,13 +26,6 @@ async def test_device_tracker_snapshot( [Platform.DEVICE_TRACKER], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py new file mode 100644 index 00000000000..b66f1965151 --- /dev/null +++ b/tests/components/husqvarna_automower/test_number.py @@ -0,0 +1,70 @@ +"""Tests for number platform.""" + +from unittest.mock import AsyncMock, patch + +from aioautomower.exceptions import ApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_commands( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test number commands.""" + entity_id = "number.test_mower_1_cutting_height" + await setup_integration(hass, mock_config_entry) + await hass.services.async_call( + domain="number", + service="set_value", + target={"entity_id": entity_id}, + service_data={"value": "3"}, + blocking=True, + ) + mocked_method = mock_automower_client.set_cutting_height + assert len(mocked_method.mock_calls) == 1 + + mocked_method.side_effect = ApiException("Test error") + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + domain="number", + service="set_value", + target={"entity_id": entity_id}, + service_data={"value": "3"}, + blocking=True, + ) + assert ( + str(exc_info.value) + == "Command couldn't be sent to the command queue: Test error" + ) + assert len(mocked_method.mock_calls) == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_snapshot_number( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the number entity.""" + with patch( + "homeassistant.components.husqvarna_automower.PLATFORMS", + [Platform.NUMBER], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 5d304330aca..2c0661f82cb 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -10,7 +10,7 @@ from syrupy import 
SnapshotAssertion from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL -from homeassistant.const import Platform +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -21,6 +21,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -45,7 +46,7 @@ async def test_sensor_unknown_states( async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get("sensor.test_mower_1_mode") - assert state.state == "unknown" + assert state.state == STATE_UNKNOWN async def test_cutting_blade_usage_time_sensor( @@ -62,6 +63,30 @@ async def test_cutting_blade_usage_time_sensor( assert state.state == "0.034" +async def test_next_start_sensor( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test if this sensor is only added, if data is available.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("sensor.test_mower_1_next_start") + assert state is not None + assert state.state == "2023-06-05T19:00:00+00:00" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].planner.next_start_datetime = None + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_next_start") + assert state.state == STATE_UNKNOWN + + @pytest.mark.parametrize( ("sensor_to_test"), [ @@ -132,13 +157,6 @@ async def test_sensor( [Platform.SENSOR], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, 
mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 8dbb5450db1..aab1128a746 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -23,6 +23,7 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, load_json_value_fixture, + snapshot_platform, ) @@ -106,13 +107,6 @@ async def test_switch( [Platform.SWITCH], ): await setup_integration(hass, mock_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, mock_config_entry.entry_id + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id ) - - assert entity_entries - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" - ) - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index 8e22fbe84f7..11670cb3565 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -15,7 +15,7 @@ from pydrawise.schema import ( import pytest from homeassistant.components.hydrawise.const import DOMAIN -from homeassistant.const import CONF_API_KEY +from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -32,7 +32,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_pydrawise( +def 
mock_legacy_pydrawise( user: User, controller: Controller, zones: list[Zone], @@ -47,10 +47,32 @@ def mock_pydrawise( yield mock_pydrawise.return_value +@pytest.fixture +def mock_pydrawise( + mock_auth: AsyncMock, + user: User, + controller: Controller, + zones: list[Zone], +) -> Generator[AsyncMock, None, None]: + """Mock Hydrawise.""" + with patch("pydrawise.client.Hydrawise", autospec=True) as mock_pydrawise: + user.controllers = [controller] + controller.zones = zones + mock_pydrawise.return_value.get_user.return_value = user + yield mock_pydrawise.return_value + + +@pytest.fixture +def mock_auth() -> Generator[AsyncMock, None, None]: + """Mock pydrawise Auth.""" + with patch("pydrawise.auth.Auth", autospec=True) as mock_auth: + yield mock_auth.return_value + + @pytest.fixture def user() -> User: """Hydrawise User fixture.""" - return User(customer_id=12345) + return User(customer_id=12345, email="asdf@asdf.com") @pytest.fixture @@ -102,7 +124,7 @@ def zones() -> list[Zone]: @pytest.fixture -def mock_config_entry() -> MockConfigEntry: +def mock_config_entry_legacy() -> MockConfigEntry: """Mock ConfigEntry.""" return MockConfigEntry( title="Hydrawise", @@ -111,6 +133,23 @@ def mock_config_entry() -> MockConfigEntry: CONF_API_KEY: "abc123", }, unique_id="hydrawise-customerid", + version=1, + ) + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock ConfigEntry.""" + return MockConfigEntry( + title="Hydrawise", + domain=DOMAIN, + data={ + CONF_USERNAME: "asfd@asdf.com", + CONF_PASSWORD: "__password__", + }, + unique_id="hydrawise-customerid", + version=1, + minor_version=2, ) diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index b0d5b098309..a7fbc008aab 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -3,15 +3,15 @@ from unittest.mock import AsyncMock from aiohttp import ClientError +from pydrawise.exceptions import 
NotAuthorizedError from pydrawise.schema import User import pytest from homeassistant import config_entries from homeassistant.components.hydrawise.const import DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_SCAN_INTERVAL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.const import CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.issue_registry as ir from tests.common import MockConfigEntry @@ -33,16 +33,20 @@ async def test_form( assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {"api_key": "abc123"} + result["flow_id"], + {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, ) mock_pydrawise.get_user.return_value = user await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Hydrawise" - assert result2["data"] == {"api_key": "abc123"} + assert result2["data"] == { + CONF_USERNAME: "asdf@asdf.com", + CONF_PASSWORD: "__password__", + } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) + mock_pydrawise.get_user.assert_called_once_with() async def test_form_api_error( @@ -54,7 +58,7 @@ async def test_form_api_error( init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - data = {"api_key": "abc123"} + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} result = await hass.config_entries.flow.async_configure( init_result["flow_id"], data ) @@ -75,7 +79,7 @@ async def test_form_connect_timeout( init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - data = {"api_key": "abc123"} + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} result 
= await hass.config_entries.flow.async_configure( init_result["flow_id"], data ) @@ -89,86 +93,34 @@ async def test_form_connect_timeout( assert result2["type"] is FlowResultType.CREATE_ENTRY -async def test_flow_import_success( +async def test_form_not_authorized_error( hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User ) -> None: - """Test that we can import a YAML config.""" - mock_pydrawise.get_user.return_value = User - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, + """Test we handle API errors.""" + mock_pydrawise.get_user.side_effect = NotAuthorizedError + + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Hydrawise" - assert result["data"] == { - CONF_API_KEY: "__api_key__", - } - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} + result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], data ) - assert issue.translation_key == "deprecated_yaml" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_pydrawise.get_user.reset_mock(side_effect=True) + mock_pydrawise.get_user.return_value = user + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result2["type"] is FlowResultType.CREATE_ENTRY -async def test_flow_import_api_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock +async def test_reauth( + hass: HomeAssistant, + user: User, + mock_pydrawise: AsyncMock, ) -> None: - """Test that we handle API errors on YAML import.""" - 
mock_pydrawise.get_user.side_effect = ClientError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "cannot_connect" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_cannot_connect" - ) - assert issue.translation_key == "deprecated_yaml_import_issue" - - -async def test_flow_import_connect_timeout( - hass: HomeAssistant, mock_pydrawise: AsyncMock -) -> None: - """Test that we handle connection timeouts on YAML import.""" - mock_pydrawise.get_user.side_effect = TimeoutError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "timeout_connect" - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_timeout_connect" - ) - assert issue.translation_key == "deprecated_yaml_import_issue" - - -async def test_flow_import_already_imported( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User -) -> None: - """Test that we can handle a YAML config already imported.""" + """Test that re-authorization works.""" mock_config_entry = MockConfigEntry( title="Hydrawise", domain=DOMAIN, @@ -179,23 +131,20 @@ async def test_flow_import_already_imported( ) mock_config_entry.add_to_hass(hass) - mock_pydrawise.get_user.return_value = user - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={ - CONF_API_KEY: "__api_key__", - CONF_SCAN_INTERVAL: 120, - }, - ) + 
mock_config_entry.async_start_reauth(hass) await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "user" - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"}, ) - assert issue.translation_key == "deprecated_yaml" + mock_pydrawise.get_user.return_value = user + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" diff --git a/tests/components/hydrawise/test_init.py b/tests/components/hydrawise/test_init.py index 6b41867b044..8ec3c3da648 100644 --- a/tests/components/hydrawise/test_init.py +++ b/tests/components/hydrawise/test_init.py @@ -5,29 +5,11 @@ from unittest.mock import AsyncMock from aiohttp import ClientError from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def test_setup_import_success( - hass: HomeAssistant, mock_pydrawise: AsyncMock -) -> None: - """Test that setup with a YAML config triggers an import and warning.""" - config = {"hydrawise": {CONF_ACCESS_TOKEN: "_access-token_"}} - assert await async_setup_component(hass, "hydrawise", config) - await hass.async_block_till_done() - - issue_registry = ir.async_get(hass) - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, "deprecated_yaml_hydrawise" - ) 
- assert issue.translation_key == "deprecated_yaml" - - async def test_connect_retry( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_pydrawise: AsyncMock ) -> None: @@ -37,3 +19,16 @@ async def test_connect_retry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_update_version( + hass: HomeAssistant, mock_config_entry_legacy: MockConfigEntry +) -> None: + """Test updating to the GaphQL API works.""" + mock_config_entry_legacy.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_legacy.entry_id) + await hass.async_block_till_done() + assert mock_config_entry_legacy.state is ConfigEntryState.SETUP_ERROR + + # Make sure reauth flow has been initiated + assert any(mock_config_entry_legacy.async_get_active_flows(hass, {"reauth"})) diff --git a/tests/components/hyperion/test_sensor.py b/tests/components/hyperion/test_sensor.py index 65991b4b7e1..8900db177fc 100644 --- a/tests/components/hyperion/test_sensor.py +++ b/tests/components/hyperion/test_sensor.py @@ -159,7 +159,6 @@ async def test_visible_effect_state_changes(hass: HomeAssistant) -> None: KEY_ACTIVE: True, KEY_COMPONENTID: "COLOR", KEY_ORIGIN: "System", - KEY_OWNER: "System", KEY_PRIORITY: 250, KEY_VALUE: {KEY_RGB: [0, 0, 0]}, KEY_VISIBLE: True, diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py index 4e0df12c6f1..c919e7a9d22 100644 --- a/tests/components/insteon/test_api_aldb.py +++ b/tests/components/insteon/test_api_aldb.py @@ -26,7 +26,7 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="aldb_data", scope="session") +@pytest.fixture(name="aldb_data", scope="module") def aldb_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/aldb_data.json")) diff --git 
a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index d2a388929b5..74ef759006c 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -29,13 +29,13 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="kpl_properties_data", scope="session") +@pytest.fixture(name="kpl_properties_data", scope="module") def kpl_properties_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/kpl_properties.json")) -@pytest.fixture(name="iolinc_properties_data", scope="session") +@pytest.fixture(name="iolinc_properties_data", scope="module") def iolinc_properties_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/iolinc_properties.json")) diff --git a/tests/components/insteon/test_api_scenes.py b/tests/components/insteon/test_api_scenes.py index 04fc74c89d1..1b8d4d50f08 100644 --- a/tests/components/insteon/test_api_scenes.py +++ b/tests/components/insteon/test_api_scenes.py @@ -18,7 +18,7 @@ from tests.common import load_fixture from tests.typing import WebSocketGenerator -@pytest.fixture(name="scene_data", scope="session") +@pytest.fixture(name="scene_data", scope="module") def aldb_data_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("insteon/scene_data.json")) diff --git a/tests/components/jvc_projector/test_select.py b/tests/components/jvc_projector/test_select.py new file mode 100644 index 00000000000..a52133bd688 --- /dev/null +++ b/tests/components/jvc_projector/test_select.py @@ -0,0 +1,44 @@ +"""Tests for JVC Projector select platform.""" + +from unittest.mock import MagicMock + +from jvcprojector import const + +from homeassistant.components.select import ( + ATTR_OPTIONS, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import 
ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_OPTION +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + +INPUT_ENTITY_ID = "select.jvc_projector_input" + + +async def test_input_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_device: MagicMock, + mock_integration: MockConfigEntry, +) -> None: + """Test input select.""" + entity = hass.states.get(INPUT_ENTITY_ID) + assert entity + assert entity.attributes.get(ATTR_FRIENDLY_NAME) == "JVC Projector Input" + assert entity.attributes.get(ATTR_OPTIONS) == [const.HDMI1, const.HDMI2] + assert entity.state == const.HDMI1 + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: INPUT_ENTITY_ID, + ATTR_OPTION: const.HDMI2, + }, + blocking=True, + ) + + mock_device.remote.assert_called_once_with(const.REMOTE_HDMI_2) diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index d843c460c34..94f2d579fc8 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -1,5 +1,6 @@ """Test KNX notify.""" +from homeassistant.components import notify from homeassistant.components.knx.const import KNX_ADDRESS from homeassistant.components.knx.schema import NotifySchema from homeassistant.const import CONF_NAME, CONF_TYPE @@ -8,7 +9,9 @@ from homeassistant.core import HomeAssistant from .conftest import KNXTestKit -async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_legacy_notify_service_simple( + hass: HomeAssistant, knx: KNXTestKit +) -> None: """Test KNX notify can send to one device.""" await knx.setup_integration( { @@ -26,22 +29,7 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: await knx.assert_write( "1/0/0", - ( - 0x49, - 0x20, - 0x6C, - 0x6F, - 0x76, - 0x65, - 0x20, - 0x4B, - 0x4E, - 0x58, - 0x0, - 0x0, - 0x0, - 0x0, - ), + (73, 32, 
108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), ) await hass.services.async_call( @@ -56,26 +44,11 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: await knx.assert_write( "1/0/0", - ( - 0x49, - 0x20, - 0x6C, - 0x6F, - 0x76, - 0x65, - 0x20, - 0x4B, - 0x4E, - 0x58, - 0x2C, - 0x20, - 0x62, - 0x75, - ), + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), ) -async def test_notify_multiple_sends_to_all_with_different_encodings( +async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodings( hass: HomeAssistant, knx: KNXTestKit ) -> None: """Test KNX notify `type` configuration.""" @@ -110,3 +83,91 @@ async def test_notify_multiple_sends_to_all_with_different_encodings( "1/0/1", (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), ) + + +async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test KNX notify can send to one device.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: "1/0/0", + } + } + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.test", + notify.ATTR_MESSAGE: "I love KNX", + }, + ) + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.test", + notify.ATTR_MESSAGE: "I love KNX, but this text is too long for KNX, poor KNX", + }, + ) + await knx.assert_write( + "1/0/0", + (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), + ) + + +async def test_notify_multiple_sends_with_different_encodings( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX notify `type` configuration.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: [ + { + CONF_NAME: "ASCII", + KNX_ADDRESS: "1/0/0", + CONF_TYPE: "string", + }, + { + CONF_NAME: "Latin-1", + KNX_ADDRESS: 
"1/0/1", + CONF_TYPE: "latin_1", + }, + ] + } + ) + message = {notify.ATTR_MESSAGE: "Gänsefüßchen"} + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.ascii", + **message, + }, + ) + await knx.assert_write( + "1/0/0", + # "G?nsef??chen" + (71, 63, 110, 115, 101, 102, 63, 63, 99, 104, 101, 110, 0, 0), + ) + + await hass.services.async_call( + notify.DOMAIN, + notify.SERVICE_SEND_MESSAGE, + { + "entity_id": "notify.latin_1", + **message, + }, + ) + await knx.assert_write( + "1/0/1", + (71, 228, 110, 115, 101, 102, 252, 223, 99, 104, 101, 110, 0, 0), + ) diff --git a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py new file mode 100644 index 00000000000..4ad06e0addb --- /dev/null +++ b/tests/components/knx/test_repairs.py @@ -0,0 +1,84 @@ +"""Test repairs for KNX integration.""" + +from http import HTTPStatus + +from homeassistant.components.knx.const import DOMAIN, KNX_ADDRESS +from homeassistant.components.knx.schema import NotifySchema +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir + +from .conftest import KNXTestKit + +from tests.typing import ClientSessionGenerator + + +async def test_knx_notify_service_issue( + hass: HomeAssistant, + knx: KNXTestKit, + hass_client: ClientSessionGenerator, + issue_registry: ir.IssueRegistry, +) -> None: + """Test the legacy notify service still works before migration and repair flow is triggered.""" + await knx.setup_integration( + { + NotifySchema.PLATFORM: { + CONF_NAME: "test", + KNX_ADDRESS: "1/0/0", + } + } + ) + http_client = await hass_client() + + # Assert no issue is present + assert len(issue_registry.issues) == 0 + + # Simulate legacy service being used + 
assert hass.services.has_service(NOTIFY_DOMAIN, NOTIFY_DOMAIN) + await hass.services.async_call( + NOTIFY_DOMAIN, + NOTIFY_DOMAIN, + service_data={"message": "It is too cold!", "target": "test"}, + blocking=True, + ) + await knx.assert_write( + "1/0/0", + (73, 116, 32, 105, 115, 32, 116, 111, 111, 32, 99, 111, 108, 100), + ) + + # Assert the issue is present + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + + # Test confirm step in repair flow + resp = await http_client.post( + RepairsFlowIndexView.url, + json={"handler": DOMAIN, "issue_id": "migrate_notify"}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + resp = await http_client.post( + RepairsFlowResourceView.url.format(flow_id=flow_id), + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data["type"] == "create_entry" + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain=DOMAIN, + issue_id="migrate_notify", + ) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..72886410924 --- /dev/null +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -0,0 +1,79 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'coordinator_data': dict({ + 'test1': dict({ + 'name': 'Test Garage 1', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'true', + 'Open_P': '100', + }), + 'Light': dict({ + 'On_B': 'true', + 'On_P': '100', + }), + }), + }), + 'test2': dict({ + 'name': 'Test Garage 2', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'false', + 'Open_P': '0', + }), + 'Light': dict({ + 'On_B': 'false', + 'On_P': '0', + }), + }), + }), + 'test3': dict({ + 'name': 
'Test Garage 3', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'false', + 'Opening_P': '0', + }), + 'Light': dict({ + 'On_B': 'false', + 'On_P': '0', + }), + }), + }), + 'test4': dict({ + 'name': 'Test Garage 4', + 'subdevices': dict({ + 'GDO': dict({ + 'Open_B': 'true', + 'Opening_P': '100', + }), + 'Light': dict({ + 'On_B': 'true', + 'On_P': '100', + }), + }), + }), + }), + 'entry': dict({ + 'data': dict({ + 'device_id': 'test-uuid', + 'email': '**REDACTED**', + 'password': '**REDACTED**', + 'site_id': 'test-site-id', + }), + 'disabled_by': None, + 'domain': 'linear_garage_door', + 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/linear_garage_door/test_cover.py b/tests/components/linear_garage_door/test_cover.py index 9db7b80fd0e..6236d2ba39c 100644 --- a/tests/components/linear_garage_door/test_cover.py +++ b/tests/components/linear_garage_door/test_cover.py @@ -45,7 +45,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: await async_init_integration(hass) with patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device" + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device" ) as operate_device: await hass.services.async_call( COVER_DOMAIN, @@ -58,15 +58,15 @@ async def test_open_cover(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device", return_value=None, ) as operate_device, patch( - 
"homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -80,11 +80,11 @@ async def test_open_cover(hass: HomeAssistant) -> None: assert operate_device.call_count == 1 with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_devices", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", return_value=[ { "id": "test1", @@ -99,7 +99,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: ], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", side_effect=lambda id: { "test1": { "GDO": {"Open_B": "true", "Open_P": "100"}, @@ -120,7 +120,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: }[id], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -136,7 +136,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: await async_init_integration(hass) with patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device" + "homeassistant.components.linear_garage_door.coordinator.Linear.operate_device" ) as operate_device: await hass.services.async_call( COVER_DOMAIN, @@ -149,15 +149,15 @@ async def test_close_cover(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.operate_device", + 
"homeassistant.components.linear_garage_door.coordinator.Linear.operate_device", return_value=None, ) as operate_device, patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): @@ -171,11 +171,11 @@ async def test_close_cover(hass: HomeAssistant) -> None: assert operate_device.call_count == 1 with ( patch( - "homeassistant.components.linear_garage_door.cover.Linear.login", + "homeassistant.components.linear_garage_door.coordinator.Linear.login", return_value=True, ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_devices", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_devices", return_value=[ { "id": "test1", @@ -190,7 +190,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: ], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.get_device_state", + "homeassistant.components.linear_garage_door.coordinator.Linear.get_device_state", side_effect=lambda id: { "test1": { "GDO": {"Open_B": "true", "Opening_P": "100"}, @@ -211,7 +211,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: }[id], ), patch( - "homeassistant.components.linear_garage_door.cover.Linear.close", + "homeassistant.components.linear_garage_door.coordinator.Linear.close", return_value=True, ), ): diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py index 0650196d619..a9565441bbb 100644 --- a/tests/components/linear_garage_door/test_diagnostics.py +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -1,5 +1,7 @@ """Test diagnostics of Linear Garage Door.""" +from syrupy import SnapshotAssertion + from homeassistant.core import HomeAssistant from .util import async_init_integration @@ -9,45 +11,11 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: 
ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = await async_init_integration(hass) result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - - assert result["entry"]["data"] == { - "email": "**REDACTED**", - "password": "**REDACTED**", - "site_id": "test-site-id", - "device_id": "test-uuid", - } - assert result["coordinator_data"] == { - "test1": { - "name": "Test Garage 1", - "subdevices": { - "GDO": {"Open_B": "true", "Open_P": "100"}, - "Light": {"On_B": "true", "On_P": "100"}, - }, - }, - "test2": { - "name": "Test Garage 2", - "subdevices": { - "GDO": {"Open_B": "false", "Open_P": "0"}, - "Light": {"On_B": "false", "On_P": "0"}, - }, - }, - "test3": { - "name": "Test Garage 3", - "subdevices": { - "GDO": {"Open_B": "false", "Opening_P": "0"}, - "Light": {"On_B": "false", "On_P": "0"}, - }, - }, - "test4": { - "name": "Test Garage 4", - "subdevices": { - "GDO": {"Open_B": "true", "Opening_P": "100"}, - "Light": {"On_B": "true", "On_P": "100"}, - }, - }, - } + assert result == snapshot diff --git a/tests/components/linear_garage_door/util.py b/tests/components/linear_garage_door/util.py index 1a849ae2348..30dbdbd06d5 100644 --- a/tests/components/linear_garage_door/util.py +++ b/tests/components/linear_garage_door/util.py @@ -12,6 +12,7 @@ async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: """Initialize mock integration.""" config_entry = MockConfigEntry( domain=DOMAIN, + entry_id="acefdd4b3a4a0911067d1cf51414201e", data={ "email": "test-email", "password": "test-password", diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index 9dc52871529..ba767f51ac6 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -3,7 +3,6 @@ from collections.abc import Iterable from contextlib import suppress import copy -from datetime import 
datetime import json from pathlib import Path from typing import Any @@ -83,7 +82,7 @@ def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: def help_custom_config( mqtt_entity_domain: str, mqtt_base_config: ConfigType, - mqtt_entity_configs: Iterable[ConfigType,], + mqtt_entity_configs: Iterable[ConfigType], ) -> ConfigType: """Tweak a default config for parametrization. @@ -1326,12 +1325,12 @@ async def help_test_entity_debug_info_max_messages( "subscriptions" ] - start_dt = datetime(2019, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) - with freeze_time(start_dt): + with freeze_time(start_dt := dt_util.utcnow()): for i in range(debug_info.STORED_MESSAGES + 1): async_fire_mqtt_message(hass, "test-topic", f"{i}") - debug_info_data = debug_info.info_for_device(hass, device.id) + debug_info_data = debug_info.info_for_device(hass, device.id) + assert len(debug_info_data["entities"][0]["subscriptions"]) == 1 assert ( len(debug_info_data["entities"][0]["subscriptions"][0]["messages"]) @@ -1401,36 +1400,35 @@ async def help_test_entity_debug_info_message( debug_info_data = debug_info.info_for_device(hass, device.id) - start_dt = datetime(2019, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) - if state_topic is not None: assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1 assert {"topic": state_topic, "messages": []} in debug_info_data["entities"][0][ "subscriptions" ] - with freeze_time(start_dt): + with freeze_time(start_dt := dt_util.utcnow()): async_fire_mqtt_message(hass, str(state_topic), state_payload) - debug_info_data = debug_info.info_for_device(hass, device.id) - assert len(debug_info_data["entities"][0]["subscriptions"]) >= 1 - assert { - "topic": state_topic, - "messages": [ - { - "payload": str(state_payload), - "qos": 0, - "retain": False, - "time": start_dt, - "topic": state_topic, - } - ], - } in debug_info_data["entities"][0]["subscriptions"] + debug_info_data = debug_info.info_for_device(hass, device.id) + assert 
len(debug_info_data["entities"][0]["subscriptions"]) >= 1 + assert { + "topic": state_topic, + "messages": [ + { + "payload": str(state_payload), + "qos": 0, + "retain": False, + "time": start_dt, + "topic": state_topic, + } + ], + } in debug_info_data["entities"][0]["subscriptions"] expected_transmissions = [] - if service: - # Trigger an outgoing MQTT message - with freeze_time(start_dt): + + with freeze_time(start_dt := dt_util.utcnow()): + if service: + # Trigger an outgoing MQTT message if service: service_data = {ATTR_ENTITY_ID: f"{domain}.beer_test"} if service_parameters: @@ -1443,23 +1441,23 @@ async def help_test_entity_debug_info_message( blocking=True, ) - expected_transmissions = [ - { - "topic": command_topic, - "messages": [ - { - "payload": str(command_payload), - "qos": 0, - "retain": False, - "time": start_dt, - "topic": command_topic, - } - ], - } - ] + expected_transmissions = [ + { + "topic": command_topic, + "messages": [ + { + "payload": str(command_payload), + "qos": 0, + "retain": False, + "time": start_dt, + "topic": command_topic, + } + ], + } + ] - debug_info_data = debug_info.info_for_device(hass, device.id) - assert debug_info_data["entities"][0]["transmitted"] == expected_transmissions + debug_info_data = debug_info.info_for_device(hass, device.id) + assert debug_info_data["entities"][0]["transmitted"] == expected_transmissions async def help_test_entity_debug_info_remove( diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index bbba791137a..422ec84c091 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -14,6 +14,7 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow 
import FlowResultType @@ -901,7 +902,7 @@ async def test_option_flow_default_suggested_values( } suggested = { mqtt.CONF_USERNAME: "user", - mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, } for key, value in defaults.items(): assert get_default(result["data_schema"].schema, key) == value @@ -963,7 +964,7 @@ async def test_option_flow_default_suggested_values( } suggested = { mqtt.CONF_USERNAME: "us3r", - mqtt.CONF_PASSWORD: "p4ss", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, } for key, value in defaults.items(): assert get_default(result["data_schema"].schema, key) == value @@ -1060,6 +1061,102 @@ async def test_skipping_advanced_options( assert result["step_id"] == step_id +@pytest.mark.parametrize( + ("test_input", "user_input", "new_password"), + [ + ( + { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "verysecret", + }, + { + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "newpassword", + }, + "newpassword", + ), + ( + { + mqtt.CONF_BROKER: "test-broker", + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: "verysecret", + }, + { + mqtt.CONF_USERNAME: "username", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, + }, + "verysecret", + ), + ], +) +async def test_step_reauth( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, + mock_try_connection: MagicMock, + mock_reload_after_entry_update: MagicMock, + test_input: dict[str, Any], + user_input: dict[str, Any], + new_password: str, +) -> None: + """Test that the reauth step works.""" + + # Prepare the config entry + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + hass.config_entries.async_update_entry( + config_entry, + data=test_input, + ) + await mqtt_mock_entry() + + # Start reauth flow + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert 
result["step_id"] == "reauth_confirm" + assert result["context"]["source"] == "reauth" + + # Show the form + result = await hass.config_entries.flow.async_init( + mqtt.DOMAIN, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + # Simulate re-auth fails + mock_try_connection.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=user_input + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + # Simulate re-auth succeeds + mock_try_connection.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=user_input + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert config_entry.data.get(mqtt.CONF_PASSWORD) == new_password + await hass.async_block_till_done() + + async def test_options_user_connection_fails( hass: HomeAssistant, mock_try_connection_time_out: MagicMock ) -> None: @@ -1232,7 +1329,7 @@ async def test_try_connection_with_advanced_parameters( } suggested = { mqtt.CONF_USERNAME: "user", - mqtt.CONF_PASSWORD: "pass", + mqtt.CONF_PASSWORD: PWD_NOT_CHANGED, mqtt.CONF_TLS_INSECURE: True, mqtt.CONF_PROTOCOL: "3.1.1", mqtt.CONF_TRANSPORT: "websockets", diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 24891895fad..a00af080bf1 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -1487,6 +1487,7 @@ async def 
test_mqtt_integration_discovery_subscribe_unsubscribe( await async_start(hass, "homeassistant", entry) await hass.async_block_till_done() await hass.async_block_till_done() + await hass.async_block_till_done() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called @@ -1537,6 +1538,7 @@ async def test_mqtt_discovery_unsubscribe_once( await async_start(hass, "homeassistant", entry) await hass.async_block_till_done() await hass.async_block_till_done() + await hass.async_block_till_done() assert ("comp/discovery/#", 0) in help_all_subscribe_calls(mqtt_client_mock) assert not mqtt_client_mock.unsubscribe.called diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 3e444e8d4c8..9d135b89f36 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -4,17 +4,22 @@ import asyncio from copy import deepcopy from datetime import datetime, timedelta import json +import socket import ssl from typing import Any, TypedDict from unittest.mock import ANY, MagicMock, call, mock_open, patch from freezegun.api import FrozenDateTimeFactory +import paho.mqtt.client as paho_mqtt import pytest import voluptuous as vol from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.client import EnsureJobAfterCooldown +from homeassistant.components.mqtt.client import ( + RECONNECT_INTERVAL_SECONDS, + EnsureJobAfterCooldown, +) from homeassistant.components.mqtt.mixins import MQTT_ENTITY_DEVICE_INFO_SCHEMA from homeassistant.components.mqtt.models import ( MessageCallbackType, @@ -146,7 +151,7 @@ async def test_mqtt_disconnects_on_home_assistant_stop( hass.bus.fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() await hass.async_block_till_done() - assert mqtt_client_mock.loop_stop.call_count == 1 + assert mqtt_client_mock.disconnect.call_count == 1 async def 
test_mqtt_await_ack_at_disconnect( @@ -161,8 +166,14 @@ async def test_mqtt_await_ack_at_disconnect( rc = 0 with patch("paho.mqtt.client.Client") as mock_client: - mock_client().connect = MagicMock(return_value=0) - mock_client().publish = MagicMock(return_value=FakeInfo()) + mqtt_client = mock_client.return_value + mqtt_client.connect = MagicMock( + return_value=0, + side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( + mqtt_client.on_connect, mqtt_client, None, 0, 0, 0 + ), + ) + mqtt_client.publish = MagicMock(return_value=FakeInfo()) entry = MockConfigEntry( domain=mqtt.DOMAIN, data={"certificate": "auto", mqtt.CONF_BROKER: "test-broker"}, @@ -1669,6 +1680,7 @@ async def test_not_calling_subscribe_when_unsubscribed_within_cooldown( the subscribe cool down period has ended. """ mqtt_mock = await mqtt_mock_entry() + mqtt_client_mock.subscribe.reset_mock() # Fake that the client is connected mqtt_mock().connected = True @@ -1925,6 +1937,7 @@ async def test_canceling_debouncer_on_shutdown( """Test canceling the debouncer when HA shuts down.""" mqtt_mock = await mqtt_mock_entry() + mqtt_client_mock.subscribe.reset_mock() # Fake that the client is connected mqtt_mock().connected = True @@ -2008,7 +2021,7 @@ async def test_initial_setup_logs_error( """Test for setup failure if initial client connection fails.""" entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) entry.add_to_hass(hass) - mqtt_client_mock.connect.return_value = 1 + mqtt_client_mock.connect.side_effect = MagicMock(return_value=1) try: assert await hass.config_entries.async_setup(entry.entry_id) except HomeAssistantError: @@ -2033,6 +2046,24 @@ async def test_logs_error_if_no_connect_broker( ) +@pytest.mark.parametrize("return_code", [4, 5]) +async def test_triggers_reauth_flow_if_auth_fails( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_client_mock: MqttMockPahoClient, + return_code: int, 
+) -> None: + """Test re-auth is triggered if authentication is failing.""" + await mqtt_mock_entry() + # test with rc = 4 -> CONNACK_REFUSED_NOT_AUTHORIZED and 5 -> CONNACK_REFUSED_BAD_USERNAME_PASSWORD + mqtt_client_mock.on_connect(mqtt_client_mock, None, None, return_code) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == "reauth" + + @patch("homeassistant.components.mqtt.client.TIMEOUT_ACK", 0.3) async def test_handle_mqtt_on_callback( hass: HomeAssistant, @@ -2230,7 +2261,12 @@ async def test_handle_mqtt_timeout_on_callback( mock_client = mock_client.return_value mock_client.publish.return_value = FakeInfo() mock_client.subscribe.side_effect = _mock_ack - mock_client.connect.return_value = 0 + mock_client.connect = MagicMock( + return_value=0, + side_effect=lambda *args, **kwargs: hass.loop.call_soon_threadsafe( + mock_client.on_connect, mock_client, None, 0, 0, 0 + ), + ) entry = MockConfigEntry( domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"} @@ -3015,14 +3051,16 @@ async def test_debug_info_multiple_devices( for dev in devices: data = json.dumps(dev["config"]) domain = dev["domain"] - id = dev["config"]["device"]["identifiers"][0] - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{id}/config", data) + device_id = dev["config"]["device"]["identifiers"][0] + async_fire_mqtt_message( + hass, f"homeassistant/{domain}/{device_id}/config", data + ) await hass.async_block_till_done() for dev in devices: domain = dev["domain"] - id = dev["config"]["device"]["identifiers"][0] - device = device_registry.async_get_device(identifiers={("mqtt", id)}) + device_id = dev["config"]["device"]["identifiers"][0] + device = device_registry.async_get_device(identifiers={("mqtt", device_id)}) assert device is not None debug_info_data = debug_info.info_for_device(hass, device.id) @@ -3040,7 +3078,7 @@ async def test_debug_info_multiple_devices( assert 
len(debug_info_data["triggers"]) == 1 discovery_data = debug_info_data["triggers"][0]["discovery_data"] - assert discovery_data["topic"] == f"homeassistant/{domain}/{id}/config" + assert discovery_data["topic"] == f"homeassistant/{domain}/{device_id}/config" assert discovery_data["payload"] == dev["config"] @@ -3098,8 +3136,10 @@ async def test_debug_info_multiple_entities_triggers( data = json.dumps(c["config"]) domain = c["domain"] # Use topic as discovery_id - id = c["config"].get("topic", c["config"].get("state_topic")) - async_fire_mqtt_message(hass, f"homeassistant/{domain}/{id}/config", data) + discovery_id = c["config"].get("topic", c["config"].get("state_topic")) + async_fire_mqtt_message( + hass, f"homeassistant/{domain}/{discovery_id}/config", data + ) await hass.async_block_till_done() device_id = config[0]["config"]["device"]["identifiers"][0] @@ -3113,7 +3153,7 @@ async def test_debug_info_multiple_entities_triggers( # Test we get debug info for each entity and trigger domain = c["domain"] # Use topic as discovery_id - id = c["config"].get("topic", c["config"].get("state_topic")) + discovery_id = c["config"].get("topic", c["config"].get("state_topic")) if c["domain"] != "device_automation": discovery_data = [e["discovery_data"] for e in debug_info_data["entities"]] @@ -3125,7 +3165,7 @@ async def test_debug_info_multiple_entities_triggers( discovery_data = [e["discovery_data"] for e in debug_info_data["triggers"]] assert { - "topic": f"homeassistant/{domain}/{id}/config", + "topic": f"homeassistant/{domain}/{discovery_id}/config", "payload": c["config"], } in discovery_data @@ -4144,3 +4184,179 @@ async def test_multi_platform_discovery( ) is not None ) + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_auto_reconnect( + hass: HomeAssistant, + mqtt_client_mock: 
MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reconnection is automatically done.""" + mqtt_mock = await mqtt_mock_entry() + await hass.async_block_till_done() + assert mqtt_mock.connected is True + mqtt_client_mock.reconnect.reset_mock() + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + mqtt_client_mock.reconnect.side_effect = OSError("foo") + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 1 + assert "Error re-connecting to MQTT server due to exception: foo" in caplog.text + + mqtt_client_mock.reconnect.side_effect = None + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + + mqtt_client_mock.disconnect() + mqtt_client_mock.on_disconnect(None, None, 0) + await hass.async_block_till_done() + + async_fire_time_changed( + hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) + ) + await hass.async_block_till_done() + # Should not reconnect after stop + assert len(mqtt_client_mock.reconnect.mock_calls) == 2 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_server_sock_connect_and_disconnect( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + await 
hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + server.close() # mock the server closing the connection on us + + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + mqtt_client_mock.on_socket_unregister_write(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_close(mqtt_client_mock, None, client) + mqtt_client_mock.on_disconnect(mqtt_client_mock, None, client) + await hass.async_block_till_done() + unsub() + + # Should have failed + assert len(calls) == 0 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_client_sock_failure_after_connect( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + calls: list[ReceiveMessage], + record_calls: MessageCallbackType, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + # Fake that the client is connected + mqtt_mock().connected = True + await hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, 
client) + mqtt_client_mock.on_socket_register_writer(mqtt_client_mock, None, client) + await hass.async_block_till_done() + + mqtt_client_mock.loop_write.side_effect = OSError("foo") + client.close() # close the client socket out from under the client + + assert mqtt_mock.connected is True + unsub = await mqtt.async_subscribe(hass, "test-topic", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) + await hass.async_block_till_done() + + unsub() + # Should have failed + assert len(calls) == 0 + + +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +async def test_loop_write_failure( + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test handling the socket connected and disconnected.""" + mqtt_mock = await mqtt_mock_entry() + await hass.async_block_till_done() + assert mqtt_mock.connected is True + + mqtt_client_mock.loop_misc.return_value = paho_mqtt.MQTT_ERR_SUCCESS + + client, server = socket.socketpair( + family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0 + ) + client.setblocking(False) + server.setblocking(False) + mqtt_client_mock.on_socket_open(mqtt_client_mock, None, client) + mqtt_client_mock.on_socket_register_write(mqtt_client_mock, None, client) + mqtt_client_mock.loop_write.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + mqtt_client_mock.loop_read.return_value = paho_mqtt.MQTT_ERR_CONN_LOST + + # Fill up the outgoing buffer to ensure that loop_write + # and loop_read are called that next time control is + # returned to the event loop + try: + for _ in range(1000): + server.send(b"long" * 100) + except BlockingIOError: + pass + + server.close() + # Once for the reader callback + await hass.async_block_till_done() + # Another for the writer 
callback + await hass.async_block_till_done() + # Final for the disconnect callback + await hass.async_block_till_done() + + assert "Disconnected from MQTT server mock-broker:1883 (7)" in caplog.text diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index ff1b308ef70..739240a352c 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -236,7 +236,7 @@ async def test_warning_if_color_mode_flags_are_used( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, - color_modes: tuple[str,], + color_modes: tuple[str, ...], ) -> None: """Test warnings deprecated config keys without supported color modes defined.""" with patch( @@ -278,7 +278,7 @@ async def test_warning_on_discovery_if_color_mode_flags_are_used( mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, config: dict[str, Any], - color_modes: tuple[str,], + color_modes: tuple[str, ...], ) -> None: """Test warnings deprecated config keys with discovery.""" with patch( diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index e18043fda1f..01d6f5d9620 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -206,7 +206,7 @@ def update_gateway_nodes( return nodes -@pytest.fixture(name="cover_node_binary_state", scope="session") +@pytest.fixture(name="cover_node_binary_state", scope="package") def cover_node_binary_state_fixture() -> dict: """Load the cover node state.""" return load_nodes_state("cover_node_binary_state.json") @@ -221,7 +221,7 @@ def cover_node_binary( return nodes[1] -@pytest.fixture(name="cover_node_percentage_state", scope="session") +@pytest.fixture(name="cover_node_percentage_state", scope="package") def cover_node_percentage_state_fixture() -> dict: """Load the cover node state.""" return load_nodes_state("cover_node_percentage_state.json") @@ -236,7 
+236,7 @@ def cover_node_percentage( return nodes[1] -@pytest.fixture(name="door_sensor_state", scope="session") +@pytest.fixture(name="door_sensor_state", scope="package") def door_sensor_state_fixture() -> dict: """Load the door sensor state.""" return load_nodes_state("door_sensor_state.json") @@ -249,7 +249,7 @@ def door_sensor(gateway_nodes: dict[int, Sensor], door_sensor_state: dict) -> Se return nodes[1] -@pytest.fixture(name="gps_sensor_state", scope="session") +@pytest.fixture(name="gps_sensor_state", scope="package") def gps_sensor_state_fixture() -> dict: """Load the gps sensor state.""" return load_nodes_state("gps_sensor_state.json") @@ -262,7 +262,7 @@ def gps_sensor(gateway_nodes: dict[int, Sensor], gps_sensor_state: dict) -> Sens return nodes[1] -@pytest.fixture(name="dimmer_node_state", scope="session") +@pytest.fixture(name="dimmer_node_state", scope="package") def dimmer_node_state_fixture() -> dict: """Load the dimmer node state.""" return load_nodes_state("dimmer_node_state.json") @@ -275,7 +275,7 @@ def dimmer_node(gateway_nodes: dict[int, Sensor], dimmer_node_state: dict) -> Se return nodes[1] -@pytest.fixture(name="hvac_node_auto_state", scope="session") +@pytest.fixture(name="hvac_node_auto_state", scope="package") def hvac_node_auto_state_fixture() -> dict: """Load the hvac node auto state.""" return load_nodes_state("hvac_node_auto_state.json") @@ -290,7 +290,7 @@ def hvac_node_auto( return nodes[1] -@pytest.fixture(name="hvac_node_cool_state", scope="session") +@pytest.fixture(name="hvac_node_cool_state", scope="package") def hvac_node_cool_state_fixture() -> dict: """Load the hvac node cool state.""" return load_nodes_state("hvac_node_cool_state.json") @@ -305,7 +305,7 @@ def hvac_node_cool( return nodes[1] -@pytest.fixture(name="hvac_node_heat_state", scope="session") +@pytest.fixture(name="hvac_node_heat_state", scope="package") def hvac_node_heat_state_fixture() -> dict: """Load the hvac node heat state.""" return 
load_nodes_state("hvac_node_heat_state.json") @@ -320,7 +320,7 @@ def hvac_node_heat( return nodes[1] -@pytest.fixture(name="power_sensor_state", scope="session") +@pytest.fixture(name="power_sensor_state", scope="package") def power_sensor_state_fixture() -> dict: """Load the power sensor state.""" return load_nodes_state("power_sensor_state.json") @@ -333,7 +333,7 @@ def power_sensor(gateway_nodes: dict[int, Sensor], power_sensor_state: dict) -> return nodes[1] -@pytest.fixture(name="rgb_node_state", scope="session") +@pytest.fixture(name="rgb_node_state", scope="package") def rgb_node_state_fixture() -> dict: """Load the rgb node state.""" return load_nodes_state("rgb_node_state.json") @@ -346,7 +346,7 @@ def rgb_node(gateway_nodes: dict[int, Sensor], rgb_node_state: dict) -> Sensor: return nodes[1] -@pytest.fixture(name="rgbw_node_state", scope="session") +@pytest.fixture(name="rgbw_node_state", scope="package") def rgbw_node_state_fixture() -> dict: """Load the rgbw node state.""" return load_nodes_state("rgbw_node_state.json") @@ -359,7 +359,7 @@ def rgbw_node(gateway_nodes: dict[int, Sensor], rgbw_node_state: dict) -> Sensor return nodes[1] -@pytest.fixture(name="energy_sensor_state", scope="session") +@pytest.fixture(name="energy_sensor_state", scope="package") def energy_sensor_state_fixture() -> dict: """Load the energy sensor state.""" return load_nodes_state("energy_sensor_state.json") @@ -374,7 +374,7 @@ def energy_sensor( return nodes[1] -@pytest.fixture(name="sound_sensor_state", scope="session") +@pytest.fixture(name="sound_sensor_state", scope="package") def sound_sensor_state_fixture() -> dict: """Load the sound sensor state.""" return load_nodes_state("sound_sensor_state.json") @@ -387,7 +387,7 @@ def sound_sensor(gateway_nodes: dict[int, Sensor], sound_sensor_state: dict) -> return nodes[1] -@pytest.fixture(name="distance_sensor_state", scope="session") +@pytest.fixture(name="distance_sensor_state", scope="package") def 
distance_sensor_state_fixture() -> dict: """Load the distance sensor state.""" return load_nodes_state("distance_sensor_state.json") @@ -402,7 +402,7 @@ def distance_sensor( return nodes[1] -@pytest.fixture(name="ir_transceiver_state", scope="session") +@pytest.fixture(name="ir_transceiver_state", scope="package") def ir_transceiver_state_fixture() -> dict: """Load the ir transceiver state.""" return load_nodes_state("ir_transceiver_state.json") @@ -417,7 +417,7 @@ def ir_transceiver( return nodes[1] -@pytest.fixture(name="relay_node_state", scope="session") +@pytest.fixture(name="relay_node_state", scope="package") def relay_node_state_fixture() -> dict: """Load the relay node state.""" return load_nodes_state("relay_node_state.json") @@ -430,7 +430,7 @@ def relay_node(gateway_nodes: dict[int, Sensor], relay_node_state: dict) -> Sens return nodes[1] -@pytest.fixture(name="temperature_sensor_state", scope="session") +@pytest.fixture(name="temperature_sensor_state", scope="package") def temperature_sensor_state_fixture() -> dict: """Load the temperature sensor state.""" return load_nodes_state("temperature_sensor_state.json") @@ -445,7 +445,7 @@ def temperature_sensor( return nodes[1] -@pytest.fixture(name="text_node_state", scope="session") +@pytest.fixture(name="text_node_state", scope="package") def text_node_state_fixture() -> dict: """Load the text node state.""" return load_nodes_state("text_node_state.json") @@ -458,7 +458,7 @@ def text_node(gateway_nodes: dict[int, Sensor], text_node_state: dict) -> Sensor return nodes[1] -@pytest.fixture(name="battery_sensor_state", scope="session") +@pytest.fixture(name="battery_sensor_state", scope="package") def battery_sensor_state_fixture() -> dict: """Load the battery sensor state.""" return load_nodes_state("battery_sensor_state.json") diff --git a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index e08dc4255be..3ecb7e08356 100644 --- a/tests/components/myuplink/conftest.py +++ 
b/tests/components/myuplink/conftest.py @@ -71,7 +71,7 @@ async def setup_credentials(hass: HomeAssistant) -> None: # Fixture group for device API endpoint. -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def load_device_file() -> str: """Fixture for loading device file.""" return load_fixture("device.json", DOMAIN) @@ -92,7 +92,7 @@ def load_systems_jv_file(load_systems_file: str) -> dict[str, Any]: return json_loads(load_systems_file) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def load_systems_file() -> str: """Load fixture file for systems.""" return load_fixture("systems-2dev.json", DOMAIN) diff --git a/tests/components/nam/__init__.py b/tests/components/nam/__init__.py index 0484fc12bd6..9b254de452c 100644 --- a/tests/components/nam/__init__.py +++ b/tests/components/nam/__init__.py @@ -4,44 +4,13 @@ from unittest.mock import AsyncMock, Mock, patch from homeassistant.components.nam.const import DOMAIN -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture INCOMPLETE_NAM_DATA = { "software_version": "NAMF-2020-36", "sensordatavalues": [], } -nam_data = { - "software_version": "NAMF-2020-36", - "uptime": "456987", - "sensordatavalues": [ - {"value_type": "PMS_P0", "value": "6.00"}, - {"value_type": "PMS_P1", "value": "10.00"}, - {"value_type": "PMS_P2", "value": "11.00"}, - {"value_type": "SDS_P1", "value": "18.65"}, - {"value_type": "SDS_P2", "value": "11.03"}, - {"value_type": "SPS30_P0", "value": "31.23"}, - {"value_type": "SPS30_P1", "value": "21.23"}, - {"value_type": "SPS30_P2", "value": "34.32"}, - {"value_type": "SPS30_P4", "value": "24.72"}, - {"value_type": "conc_co2_ppm", "value": "865"}, - {"value_type": "BME280_temperature", "value": "7.56"}, - {"value_type": "BME280_humidity", "value": "45.69"}, - {"value_type": "BME280_pressure", "value": "101101.17"}, - {"value_type": "BMP_temperature", "value": "7.56"}, - {"value_type": "BMP_pressure", "value": 
"103201.18"}, - {"value_type": "BMP280_temperature", "value": "5.56"}, - {"value_type": "BMP280_pressure", "value": "102201.18"}, - {"value_type": "SHT3X_temperature", "value": "6.28"}, - {"value_type": "SHT3X_humidity", "value": "34.69"}, - {"value_type": "humidity", "value": "46.23"}, - {"value_type": "temperature", "value": "6.26"}, - {"value_type": "HECA_temperature", "value": "7.95"}, - {"value_type": "HECA_humidity", "value": "49.97"}, - {"value_type": "signal", "value": "-72"}, - ], -} - async def init_integration(hass, co2_sensor=True) -> MockConfigEntry: """Set up the Nettigo Air Monitor integration in Home Assistant.""" @@ -52,6 +21,8 @@ async def init_integration(hass, co2_sensor=True) -> MockConfigEntry: data={"host": "10.10.2.3"}, ) + nam_data = load_json_object_fixture("nam/nam_data.json") + if not co2_sensor: # Remove conc_co2_ppm value nam_data["sensordatavalues"].pop(6) diff --git a/tests/components/nam/fixtures/diagnostics_data.json b/tests/components/nam/fixtures/diagnostics_data.json deleted file mode 100644 index a384e8cd386..00000000000 --- a/tests/components/nam/fixtures/diagnostics_data.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "bme280_humidity": 45.7, - "bme280_pressure": 1011.012, - "bme280_temperature": 7.6, - "bmp180_pressure": 1032.012, - "bmp180_temperature": 7.6, - "bmp280_pressure": 1022.012, - "bmp280_temperature": 5.6, - "dht22_humidity": 46.2, - "dht22_temperature": 6.3, - "heca_humidity": 50.0, - "heca_temperature": 8.0, - "mhz14a_carbon_dioxide": 865.0, - "pms_caqi": 19, - "pms_caqi_level": "very_low", - "pms_p0": 6.0, - "pms_p1": 10.0, - "pms_p2": 11.0, - "sds011_caqi": 19, - "sds011_caqi_level": "very_low", - "sds011_p1": 18.6, - "sds011_p2": 11.0, - "sht3x_humidity": 34.7, - "sht3x_temperature": 6.3, - "signal": -72.0, - "sps30_caqi": 54, - "sps30_caqi_level": "medium", - "sps30_p0": 31.2, - "sps30_p1": 21.2, - "sps30_p2": 34.3, - "sps30_p4": 24.7, - "uptime": 456987 -} diff --git a/tests/components/nam/fixtures/nam_data.json 
b/tests/components/nam/fixtures/nam_data.json new file mode 100644 index 00000000000..93a33d4a552 --- /dev/null +++ b/tests/components/nam/fixtures/nam_data.json @@ -0,0 +1,30 @@ +{ + "software_version": "NAMF-2020-36", + "uptime": "456987", + "sensordatavalues": [ + { "value_type": "PMS_P0", "value": "6.00" }, + { "value_type": "PMS_P1", "value": "10.00" }, + { "value_type": "PMS_P2", "value": "11.00" }, + { "value_type": "SDS_P1", "value": "18.65" }, + { "value_type": "SDS_P2", "value": "11.03" }, + { "value_type": "SPS30_P0", "value": "31.23" }, + { "value_type": "SPS30_P1", "value": "21.23" }, + { "value_type": "SPS30_P2", "value": "34.32" }, + { "value_type": "SPS30_P4", "value": "24.72" }, + { "value_type": "conc_co2_ppm", "value": "865" }, + { "value_type": "BME280_temperature", "value": "7.56" }, + { "value_type": "BME280_humidity", "value": "45.69" }, + { "value_type": "BME280_pressure", "value": "101101.17" }, + { "value_type": "BMP_temperature", "value": "7.56" }, + { "value_type": "BMP_pressure", "value": "103201.18" }, + { "value_type": "BMP280_temperature", "value": "5.56" }, + { "value_type": "BMP280_pressure", "value": "102201.18" }, + { "value_type": "SHT3X_temperature", "value": "6.28" }, + { "value_type": "SHT3X_humidity", "value": "34.69" }, + { "value_type": "humidity", "value": "46.23" }, + { "value_type": "temperature", "value": "6.26" }, + { "value_type": "HECA_temperature", "value": "7.95" }, + { "value_type": "HECA_humidity", "value": "49.97" }, + { "value_type": "signal", "value": "-72" } + ] +} diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..2ebc0246090 --- /dev/null +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'data': dict({ + 'bme280_humidity': 45.7, + 'bme280_pressure': 1011.012, + 'bme280_temperature': 7.6, + 
'bmp180_pressure': 1032.012, + 'bmp180_temperature': 7.6, + 'bmp280_pressure': 1022.012, + 'bmp280_temperature': 5.6, + 'dht22_humidity': 46.2, + 'dht22_temperature': 6.3, + 'heca_humidity': 50.0, + 'heca_temperature': 8.0, + 'mhz14a_carbon_dioxide': 865.0, + 'pms_caqi': 19, + 'pms_caqi_level': 'very_low', + 'pms_p0': 6.0, + 'pms_p1': 10.0, + 'pms_p2': 11.0, + 'sds011_caqi': 19, + 'sds011_caqi_level': 'very_low', + 'sds011_p1': 18.6, + 'sds011_p2': 11.0, + 'sht3x_humidity': 34.7, + 'sht3x_temperature': 6.3, + 'signal': -72.0, + 'sps30_caqi': 54, + 'sps30_caqi_level': 'medium', + 'sps30_p0': 31.2, + 'sps30_p1': 21.2, + 'sps30_p2': 34.3, + 'sps30_p4': 24.7, + 'uptime': 456987, + }), + 'info': dict({ + 'host': '10.10.2.3', + }), + }) +# --- diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..bbc655ecbb6 --- /dev/null +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -0,0 +1,1714 @@ +# serializer version: 1 +# name: test_sensor[button.nettigo_air_monitor_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.nettigo_air_monitor_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.nettigo_air_monitor_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Nettigo Air Monitor Restart', + }), + 'context': , 
+ 'entity_id': 'button.nettigo_air_monitor_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor BME280 humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bme280_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.7', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BME280 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bme280_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1011.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BME280 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bme280_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bme280_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor BME280 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': 
, + 'entity_id': 'sensor.nettigo_air_monitor_bme280_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP180 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp180_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp180_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BMP180 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1032.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, 
+ 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP180 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp180_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp180_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor BMP180 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp180_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP280 pressure', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp280_pressure', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp280_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Nettigo Air Monitor BMP280 pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1022.012', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BMP280 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bmp280_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bmp280_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor BMP280 temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bmp280_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_dht22_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHT22 humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dht22_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-dht22_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor DHT22 humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_dht22_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '46.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_dht22_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHT22 temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dht22_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-dht22_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor DHT22 temperature', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_dht22_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_heca_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HECA humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heca_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-heca_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor HECA humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_heca_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_heca_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HECA temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heca_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-heca_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor HECA temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_heca_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nettigo_air_monitor_last_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': 'aa:bb:cc:dd:ee:ff-uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nettigo Air Monitor Last restart', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'2024-04-15T05:03:33+00:00', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'MH-Z14A carbon dioxide', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mhz14a_carbon_dioxide', + 'unique_id': 'aa:bb:cc:dd:ee:ff-mhz14a_carbon_dioxide', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'Nettigo Air Monitor MH-Z14A carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '865.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'PMSx003 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor PMSx003 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor PMSx003 common air quality index level', + 'options': list([ + 'very_low', + 'low', + 'medium', + 
'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_low', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM1', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm1', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PMSx003 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pmsx003_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-pms_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_pmsx003_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor PMSx003 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 
'entity_id': 'sensor.nettigo_air_monitor_pmsx003_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SDS011 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SDS011 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_common_air_quality_index_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor SDS011 common air quality index level', + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'very_low', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_p1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor SDS011 PM10', + 
'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.6', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SDS011 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sds011_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sds011_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor SDS011 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sds011_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SHT3X humidity', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sht3x_humidity', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sht3x_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Nettigo Air Monitor SHT3X humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.7', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SHT3X temperature', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sht3x_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sht3x_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Nettigo Air Monitor SHT3X temperature', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sht3x_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nettigo_air_monitor_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff-signal', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Nettigo Air Monitor Signal strength', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-72.0', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SPS30 common air quality index', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_caqi', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_caqi', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SPS30 common air quality index', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 common air quality index level', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_caqi_level', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_caqi_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_common_air_quality_index_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Nettigo Air Monitor SPS30 common air quality index level', + 'options': list([ 
+ 'very_low', + 'low', + 'medium', + 'high', + 'very_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_common_air_quality_index_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM1', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm1', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p0', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM10', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm10', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p1', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SPS30 PM2.5', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm25', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p2', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'Nettigo Air Monitor SPS30 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 
'entity_id': 'sensor.nettigo_air_monitor_sps30_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.3', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SPS30 PM4', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sps30_pm4', + 'unique_id': 'aa:bb:cc:dd:ee:ff-sps30_p4', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nettigo Air Monitor SPS30 PM4', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_sps30_pm4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24.7', + }) +# --- diff --git a/tests/components/nam/test_diagnostics.py b/tests/components/nam/test_diagnostics.py index 9d13121392f..7ed49a37e0a 100644 --- a/tests/components/nam/test_diagnostics.py +++ b/tests/components/nam/test_diagnostics.py @@ -1,25 +1,23 @@ """Test NAM diagnostics.""" -import json +from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant from . 
import init_integration -from tests.common import load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = await init_integration(hass) - diagnostics_data = json.loads(load_fixture("diagnostics_data.json", "nam")) - result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result["info"] == {"host": "10.10.2.3"} - assert result["data"] == diagnostics_data + assert result == snapshot diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index c88a34ae497..2b307b4b02a 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -3,27 +3,18 @@ from datetime import timedelta from unittest.mock import AsyncMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory from nettigo_air_monitor import ApiError +from syrupy import SnapshotAssertion from homeassistant.components.nam.const import DOMAIN -from homeassistant.components.sensor import ( - ATTR_OPTIONS, - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, SensorDeviceClass from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, - ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_MILLION, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, STATE_UNAVAILABLE, - UnitOfPressure, + Platform, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -31,447 +22,30 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow -from . 
import INCOMPLETE_NAM_DATA, init_integration, nam_data +from . import INCOMPLETE_NAM_DATA, init_integration -from tests.common import async_fire_time_changed +from tests.common import ( + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) -async def test_sensor(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_sensor( + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: """Test states of the air_quality.""" - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "aa:bb:cc:dd:ee:ff-signal", - suggested_object_id="nettigo_air_monitor_signal_strength", - disabled_by=None, - ) + hass.config.set_time_zone("UTC") + freezer.move_to("2024-04-20 12:00:00+00:00") - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - "aa:bb:cc:dd:ee:ff-uptime", - suggested_object_id="nettigo_air_monitor_uptime", - disabled_by=None, - ) + with patch("homeassistant.components.nam.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - # Patch return value from utcnow, with offset to make sure the patch is correct - now = utcnow() - timedelta(hours=1) - with patch("homeassistant.components.nam.sensor.utcnow", return_value=now): - await init_integration(hass) - - state = hass.states.get("sensor.nettigo_air_monitor_bme280_humidity") - assert state - assert state.state == "45.7" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") - assert state - assert 
state.state == "7.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bme280_pressure") - assert state - assert state.state == "1011.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bme280_pressure") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bme280_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp180_temperature") - assert state - assert state.state == "7.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp180_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp180_pressure") - assert state - assert state.state == "1032.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp180_pressure") - assert entry - assert entry.unique_id == 
"aa:bb:cc:dd:ee:ff-bmp180_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp280_temperature") - assert state - assert state.state == "5.6" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_bmp280_pressure") - assert state - assert state.state == "1022.012" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PRESSURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPressure.HPA - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_bmp280_pressure") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-bmp280_pressure" - - state = hass.states.get("sensor.nettigo_air_monitor_sht3x_humidity") - assert state - assert state.state == "34.7" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_sht3x_temperature") - assert state - assert state.state == "6.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = 
entity_registry.async_get("sensor.nettigo_air_monitor_sht3x_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sht3x_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_dht22_humidity") - assert state - assert state.state == "46.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_dht22_temperature") - assert state - assert state.state == "6.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_dht22_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-dht22_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_heca_humidity") - assert state - assert state.state == "50.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_humidity") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_humidity" - - state = hass.states.get("sensor.nettigo_air_monitor_heca_temperature") - assert state - assert state.state == "8.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert 
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_heca_temperature") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-heca_temperature" - - state = hass.states.get("sensor.nettigo_air_monitor_signal_strength") - assert state - assert state.state == "-72.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.SIGNAL_STRENGTH - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == SIGNAL_STRENGTH_DECIBELS_MILLIWATT - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_signal_strength") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-signal" - - state = hass.states.get("sensor.nettigo_air_monitor_uptime") - assert state - assert ( - state.state - == (now - timedelta(seconds=456987)).replace(microsecond=0).isoformat() - ) - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP - assert state.attributes.get(ATTR_STATE_CLASS) is None - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_uptime") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-uptime" - - state = hass.states.get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level" - ) - assert state - assert state.state == "very_low" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_caqi_level" - assert entry.translation_key == "pmsx003_caqi_level" - - state = hass.states.get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index" - ) - assert state - 
assert state.state == "19" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_pmsx003_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_caqi" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm10") - assert state - assert state.state == "10.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm10") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p1" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm2_5") - assert state - assert state.state == "11.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm2_5") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p2" - - state = hass.states.get("sensor.nettigo_air_monitor_pmsx003_pm1") - assert state - assert state.state == "6.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM1 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_pmsx003_pm1") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-pms_p0" - - state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm10") - assert state - assert state.state == "18.6" - assert 
state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm10") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p1" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index" - ) - assert state - assert state.state == "19" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_caqi" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index_level" - ) - assert state - assert state.state == "very_low" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sds011_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_caqi_level" - assert entry.translation_key == "sds011_caqi_level" - - state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm2_5") - assert state - assert state.state == "11.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sds011_pm2_5") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sds011_p2" - - state = 
hass.states.get("sensor.nettigo_air_monitor_sps30_common_air_quality_index") - assert state - assert state.state == "54" - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_caqi" - - state = hass.states.get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index_level" - ) - assert state - assert state.state == "medium" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM - assert state.attributes.get(ATTR_OPTIONS) == [ - "very_low", - "low", - "medium", - "high", - "very_high", - ] - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_sps30_common_air_quality_index_level" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_caqi_level" - assert entry.translation_key == "sps30_caqi_level" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm1") - assert state - assert state.state == "31.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM1 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm1") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p0" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm10") - assert state - assert state.state == "21.2" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM10 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm10") - assert entry - assert 
entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p1" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm2_5") - assert state - assert state.state == "34.3" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.PM25 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm2_5") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p2" - - state = hass.states.get("sensor.nettigo_air_monitor_sps30_pm4") - assert state - assert state.state == "24.7" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - ) - assert state.attributes.get(ATTR_ICON) is None - - entry = entity_registry.async_get("sensor.nettigo_air_monitor_sps30_pm4") - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-sps30_p4" - - state = hass.states.get("sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide") - assert state - assert state.state == "865.0" - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.CO2 - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert ( - state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - == CONCENTRATION_PARTS_PER_MILLION - ) - entry = entity_registry.async_get( - "sensor.nettigo_air_monitor_mh_z14a_carbon_dioxide" - ) - assert entry - assert entry.unique_id == "aa:bb:cc:dd:ee:ff-mhz14a_carbon_dioxide" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_sensor_disabled( @@ -524,6 +98,8 @@ async def test_incompleta_data_after_device_restart(hass: HomeAssistant) -> None async def test_availability(hass: HomeAssistant) -> None: """Ensure that we mark the entities unavailable correctly when device causes an 
error.""" + nam_data = load_json_object_fixture("nam/nam_data.json") + await init_integration(hass) state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") @@ -566,6 +142,8 @@ async def test_availability(hass: HomeAssistant) -> None: async def test_manual_update_entity(hass: HomeAssistant) -> None: """Test manual update entity via service homeasasistant/update_entity.""" + nam_data = load_json_object_fixture("nam/nam_data.json") + await init_integration(hass) await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index def99633435..419b3648124 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -399,7 +399,7 @@ async def test_camera_event( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -572,7 +572,7 @@ async def test_multiple_image_events_in_session( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT + b"-2" @@ -585,7 +585,7 @@ async def test_multiple_image_events_in_session( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT + b"-1" @@ -673,7 +673,7 @@ async def test_multiple_clip_preview_events_in_session( client = await hass_client() response 
= await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -685,7 +685,7 @@ async def test_multiple_clip_preview_events_in_session( assert media.mime_type == "video/mp4" response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -888,7 +888,7 @@ async def test_camera_event_clip_preview( client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == mp4.getvalue() @@ -896,7 +896,7 @@ async def test_camera_event_clip_preview( response = await client.get( f"/api/nest/event_media/{device.id}/{event_identifier}/thumbnail" ) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" await response.read() # Animated gif format not tested @@ -907,9 +907,7 @@ async def test_event_media_render_invalid_device_id( await setup_platform() client = await hass_client() response = await client.get("/api/nest/event_media/invalid-device-id") - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_event_media_render_invalid_event_id( @@ -924,9 +922,7 @@ async def test_event_media_render_invalid_event_id( client = await hass_client() response = await client.get(f"/api/nest/event_media/{device.id}/invalid-event-id") - 
assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_event_media_failure( @@ -981,9 +977,7 @@ async def test_event_media_failure( # Media is not available to be fetched client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_media_permission_unauthorized( @@ -1011,9 +1005,9 @@ async def test_media_permission_unauthorized( client = await hass_client() response = await client.get(media_url) - assert response.status == HTTPStatus.UNAUTHORIZED, ( - "Response not matched: %s" % response - ) + assert ( + response.status == HTTPStatus.UNAUTHORIZED + ), f"Response not matched: {response}" async def test_multiple_devices( @@ -1157,7 +1151,7 @@ async def test_media_store_persistence( # Fetch event media client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -1198,7 +1192,7 @@ async def test_media_store_persistence( # Verify media exists response = await client.get(media.url) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT @@ -1254,9 +1248,7 @@ async def test_media_store_save_filesystem_error( # We fail to retrieve the media from the server since the origin filesystem op failed client = await hass_client() response = await client.get(media.url) - assert response.status == HTTPStatus.NOT_FOUND, ( - 
"Response not matched: %s" % response - ) + assert response.status == HTTPStatus.NOT_FOUND, f"Response not matched: {response}" async def test_media_store_load_filesystem_error( @@ -1307,9 +1299,9 @@ async def test_media_store_load_filesystem_error( response = await client.get( f"/api/nest/event_media/{device.id}/{event_identifier}" ) - assert response.status == HTTPStatus.NOT_FOUND, ( - "Response not matched: %s" % response - ) + assert ( + response.status == HTTPStatus.NOT_FOUND + ), f"Response not matched: {response}" @pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)]) @@ -1384,7 +1376,7 @@ async def test_camera_event_media_eviction( for i in reversed(range(3, 8)): child_event = next(child_events) response = await client.get(f"/api/nest/event_media/{child_event.identifier}") - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == f"image-bytes-{i}".encode() await hass.async_block_till_done() @@ -1444,7 +1436,7 @@ async def test_camera_image_resize( client = await hass_client() response = await client.get(browse.thumbnail) - assert response.status == HTTPStatus.OK, "Response not matched: %s" % response + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" contents = await response.read() assert contents == IMAGE_BYTES_FROM_EVENT diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..bd4ecbba084 --- /dev/null +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -0,0 +1,2277 @@ +# serializer version: 1 +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile 
Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 
9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child 
sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms 
protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe 
browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_Sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 
'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, 
+ 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_button.ambr b/tests/components/nextdns/snapshots/test_button.ambr new file mode 100644 index 00000000000..32dc31eea19 --- /dev/null +++ b/tests/components/nextdns/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_button[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear 
logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..34b40433e3b --- /dev/null +++ b/tests/components/nextdns/snapshots/test_sensor.ambr @@ -0,0 +1,4749 @@ +# serializer version: 1 +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , 
+ 'entity_id': 'button.fake_profile_clear_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries', + 'unique_id': 'xyz12_doh3_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'DNS-over-HTTP/3 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries_ratio', + 'unique_id': 'xyz12_doh3_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_http_3_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries', + 'unique_id': 'xyz12_doh_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries_ratio-entry] 
+ EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries_ratio', + 'unique_id': 'xyz12_doh_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_https_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.4', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries', + 'unique_id': 'xyz12_doq_queries', + 
'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries_ratio', + 'unique_id': 'xyz12_doq_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_quic_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.7', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries', + 'unique_id': 'xyz12_dot_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries_ratio', + 'unique_id': 'xyz12_dot_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_over_tls_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', + 
'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.1', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'all_queries', + 'unique_id': 'xyz12_all_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'DNS queries blocked', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries', + 'unique_id': 'xyz12_blocked_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries_ratio', + 'unique_id': 'xyz12_blocked_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_blocked_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_relayed-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries relayed', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relayed_queries', + 'unique_id': 'xyz12_relayed_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dns_queries_relayed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries relayed', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_not_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC not validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'not_validated_queries', + 'unique_id': 'xyz12_not_validated_queries', + 
'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_not_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC not validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries', + 'unique_id': 'xyz12_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': 
, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries_ratio', + 'unique_id': 'xyz12_validated_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_dnssec_validated_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries', + 'unique_id': 'xyz12_encrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile Encrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries_ratio', + 'unique_id': 'xyz12_encrypted_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_encrypted_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv4_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv4 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_queries', + 'unique_id': 'xyz12_ipv4_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv4_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv4 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries', + 'unique_id': 'xyz12_ipv6_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries_ratio-entry] + EntityRegistryEntrySnapshot({ 
+ 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries_ratio', + 'unique_id': 'xyz12_ipv6_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_ipv6_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries', + 'unique_id': 'xyz12_tcp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries_ratio', + 'unique_id': 'xyz12_tcp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_tcp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries', + 'unique_id': 'xyz12_udp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries_ratio', + 'unique_id': 'xyz12_udp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.fake_profile_udp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.8', + }) +# --- +# name: test_sensor[sensor.fake_profile_unencrypted_queries-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unencrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'unencrypted_queries', + 'unique_id': 'xyz12_unencrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_sensor[sensor.fake_profile_unencrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Unencrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- 
+# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_9gag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 9GAG', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, 
+ 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Amazon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_amazon', + 'unique_id': 'xyz12_block_amazon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_amazon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Amazon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_bereal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block BeReal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bereal', + 'unique_id': 'xyz12_block_bereal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_bereal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block BeReal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Blizzard', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_blizzard', + 'unique_id': 'xyz12_block_blizzard', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_blizzard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Blizzard', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_sensor[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block ChatGPT', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_chatgpt', + 'unique_id': 'xyz12_block_chatgpt', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_chatgpt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block ChatGPT', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Dailymotion', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dailymotion', + 'unique_id': 'xyz12_block_dailymotion', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dailymotion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Dailymotion', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dating', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dating', + 'unique_id': 'xyz12_block_dating', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dating', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_discord-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_discord', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Discord', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_discord', + 'unique_id': 'xyz12_block_discord', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_discord-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Discord', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_discord', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Disney Plus', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disneyplus', + 
'unique_id': 'xyz12_block_disneyplus', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_disney_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Disney Plus', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block eBay', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ebay', + 'unique_id': 'xyz12_block_ebay', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_ebay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block eBay', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Facebook', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_facebook', + 'unique_id': 'xyz12_block_facebook', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_facebook-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Facebook', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Fortnite', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_fortnite', + 'unique_id': 'xyz12_block_fortnite', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_fortnite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Fortnite', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block gambling', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_gambling', + 'unique_id': 'xyz12_block_gambling', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_gambling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block gambling', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Google Chat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_google_chat', + 'unique_id': 'xyz12_block_google_chat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_google_chat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Google Chat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hbo_max-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block HBO Max', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_hbomax', + 'unique_id': 'xyz12_block_hbomax', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[switch.fake_profile_block_hbo_max-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block HBO Max', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Hulu', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xyz12_block_hulu', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_hulu-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Hulu', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Imgur', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_imgur', + 'unique_id': 'xyz12_block_imgur', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_imgur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Imgur', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Instagram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_instagram', + 'unique_id': 'xyz12_block_instagram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_instagram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Instagram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block League of Legends', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_leagueoflegends', + 'unique_id': 'xyz12_block_leagueoflegends', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_league_of_legends-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block League of Legends', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Mastodon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_mastodon', + 'unique_id': 'xyz12_block_mastodon', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_mastodon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Mastodon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Messenger', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_messenger', + 'unique_id': 'xyz12_block_messenger', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_messenger-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Messenger', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Minecraft', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_minecraft', + 'unique_id': 'xyz12_block_minecraft', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_minecraft-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Minecraft', + }), + 'context': , + 
'entity_id': 'switch.fake_profile_block_minecraft', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_netflix-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Netflix', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_netflix', + 'unique_id': 'xyz12_block_netflix', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_netflix-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Netflix', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 
'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block online gaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_online_gaming', + 'unique_id': 'xyz12_block_online_gaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_online_gaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block online gaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Pinterest', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_pinterest', + 'unique_id': 'xyz12_block_pinterest', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_pinterest-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Pinterest', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block piracy', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_piracy', + 'unique_id': 'xyz12_block_piracy', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_piracy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block piracy', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_piracy', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_playstation_network-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block PlayStation Network', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_playstation_network', + 'unique_id': 'xyz12_block_playstation_network', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_playstation_network-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block PlayStation Network', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_porn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block porn', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_porn', + 'unique_id': 
'xyz12_block_porn', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_porn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block porn', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_porn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Prime Video', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_primevideo', + 'unique_id': 'xyz12_block_primevideo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_prime_video-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Prime Video', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Reddit', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_reddit', + 'unique_id': 'xyz12_block_reddit', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_reddit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Reddit', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Roblox', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_roblox', + 'unique_id': 'xyz12_block_roblox', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_roblox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Roblox', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , 
+ 'entity_id': 'switch.fake_profile_block_signal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Signal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_signal', + 'unique_id': 'xyz12_block_signal', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_signal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Signal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_signal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_skype', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Skype', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_skype', + 'unique_id': 'xyz12_block_skype', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_skype-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Skype', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_skype', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_snapchat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Snapchat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_snapchat', + 'unique_id': 'xyz12_block_snapchat', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_snapchat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Snapchat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block social networks', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_social_networks', + 'unique_id': 'xyz12_block_social_networks', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_social_networks-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block social 
networks', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Spotify', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_spotify', + 'unique_id': 'xyz12_block_spotify', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_spotify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Spotify', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_steam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Steam', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_steam', + 'unique_id': 
'xyz12_block_steam', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_steam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Steam', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_steam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Telegram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_telegram', + 'unique_id': 'xyz12_block_telegram', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_telegram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Telegram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block TikTok', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tiktok', + 'unique_id': 'xyz12_block_tiktok', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tiktok-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block TikTok', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tinder', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tinder', + 'unique_id': 'xyz12_block_tinder', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tinder-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tinder', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , 
+ 'entity_id': 'switch.fake_profile_block_tumblr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tumblr', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tumblr', + 'unique_id': 'xyz12_block_tumblr', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_tumblr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tumblr', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Twitch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitch', + 'unique_id': 'xyz12_block_twitch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_twitch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Twitch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block video streaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_video_streaming', + 'unique_id': 'xyz12_block_video_streaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_video_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block video streaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Vimeo', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vimeo', + 'unique_id': 'xyz12_block_vimeo', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_vimeo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Fake Profile Block Vimeo', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_vk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block VK', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vk', + 'unique_id': 'xyz12_block_vk', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_vk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block VK', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block WhatsApp', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_whatsapp', + 'unique_id': 'xyz12_block_whatsapp', + 
'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_whatsapp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block WhatsApp', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block X (formerly Twitter)', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitter', + 'unique_id': 'xyz12_block_twitter', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block X (formerly Twitter)', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Xbox Live', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_xboxlive', + 'unique_id': 'xyz12_block_xboxlive', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_xbox_live-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Xbox Live', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block YouTube', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_youtube', + 'unique_id': 'xyz12_block_youtube', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_youtube-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block YouTube', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Zoom', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_zoom', + 'unique_id': 'xyz12_block_zoom', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_block_zoom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Zoom', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_sensor[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 
'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr new file mode 100644 index 00000000000..8472f02e8c5 --- /dev/null +++ b/tests/components/nextdns/snapshots/test_switch.ambr @@ -0,0 +1,4749 @@ +# serializer version: 1 +# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_connection_status', + 'unique_id': 'xyz12_this_device_nextdns_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': 
, + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Device profile connection status', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_profile_connection_status', + 'unique_id': 'xyz12_this_device_profile_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Fake Profile Device profile connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[button.fake_profile_clear_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.fake_profile_clear_logs', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clear logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'clear_logs', + 'unique_id': 'xyz12_clear_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[button.fake_profile_clear_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Clear logs', + }), + 'context': , + 'entity_id': 'button.fake_profile_clear_logs', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries', + 'unique_id': 'xyz12_doh3_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTP/3 queries ratio', + 
'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh3_queries_ratio', + 'unique_id': 'xyz12_doh3_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries', + 'unique_id': 'xyz12_doh_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-HTTPS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doh_queries_ratio', + 'unique_id': 'xyz12_doh_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.4', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries', + 'unique_id': 'xyz12_doq_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: 
test_switch[sensor.fake_profile_dns_over_quic_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-QUIC queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'doq_queries_ratio', + 'unique_id': 'xyz12_doq_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.7', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries', + 'unique_id': 'xyz12_dot_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS-over-TLS queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dot_queries_ratio', + 'unique_id': 'xyz12_dot_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 
'context': , + 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.1', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'all_queries', + 'unique_id': 'xyz12_all_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries', + 'unique_id': 'xyz12_blocked_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries blocked ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blocked_queries_ratio', + 'unique_id': 'xyz12_blocked_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries blocked ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.0', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS queries relayed', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relayed_queries', + 'unique_id': 'xyz12_relayed_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS queries relayed', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dns_queries_relayed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC not validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'not_validated_queries', + 'unique_id': 'xyz12_not_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: 
test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC not validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries', + 'unique_id': 'xyz12_validated_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNSSEC validated queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'validated_queries_ratio', + 'unique_id': 'xyz12_validated_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75.0', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries', + 'unique_id': 'xyz12_encrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries', + 
'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Encrypted queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'encrypted_queries_ratio', + 'unique_id': 'xyz12_encrypted_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Encrypted queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.0', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv4 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_queries', + 'unique_id': 'xyz12_ipv4_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv4_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv4 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv4_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries', + 'unique_id': 'xyz12_ipv6_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IPv6 queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv6_queries_ratio', + 'unique_id': 'xyz12_ipv6_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IPv6 queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries', + 'unique_id': 'xyz12_tcp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Fake Profile TCP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TCP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tcp_queries_ratio', + 'unique_id': 'xyz12_tcp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_tcp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile TCP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries', + 'unique_id': 'xyz12_udp_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'UDP queries ratio', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'udp_queries_ratio', + 'unique_id': 'xyz12_udp_queries_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile UDP queries ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_udp_queries_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.8', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Unencrypted queries', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'unencrypted_queries', + 'unique_id': 'xyz12_unencrypted_queries', + 'unit_of_measurement': 'queries', + }) +# --- +# name: test_switch[sensor.fake_profile_unencrypted_queries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Unencrypted queries', + 'state_class': , + 'unit_of_measurement': 'queries', + }), + 'context': , + 'entity_id': 'sensor.fake_profile_unencrypted_queries', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AI-Driven threat detection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ai_threat_detection', + 'unique_id': 'xyz12_ai_threat_detection', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switch[switch.fake_profile_ai_driven_threat_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile AI-Driven threat detection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_allow_affiliate_tracking_links-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow affiliate & tracking links', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'allow_affiliate', + 'unique_id': 'xyz12_allow_affiliate', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_allow_affiliate_tracking_links-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Allow affiliate & tracking links', + }), + 'context': , + 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_anonymized_edns_client_subnet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Anonymized EDNS client subnet', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'anonymized_ecs', + 'unique_id': 'xyz12_anonymized_ecs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_anonymized_edns_client_subnet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', + }), + 'context': , + 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_9gag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block 9GAG', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_9gag', + 'unique_id': 'xyz12_block_9gag', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_9gag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block 9GAG', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_9gag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_amazon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Amazon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_amazon', + 'unique_id': 'xyz12_block_amazon', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_amazon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Amazon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_amazon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_bereal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block BeReal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bereal', + 'unique_id': 'xyz12_block_bereal', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_bereal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block BeReal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bereal', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_blizzard-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Blizzard', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_blizzard', + 'unique_id': 'xyz12_block_blizzard', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_blizzard-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Blizzard', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_blizzard', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_bypass_methods-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block bypass methods', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_bypass_methods', + 'unique_id': 'xyz12_block_bypass_methods', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_switch[switch.fake_profile_block_bypass_methods-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block bypass methods', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_bypass_methods', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_chatgpt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block ChatGPT', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_chatgpt', + 'unique_id': 'xyz12_block_chatgpt', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_chatgpt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block ChatGPT', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_chatgpt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_child_sexual_abuse_material-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
None, + 'original_icon': None, + 'original_name': 'Block child sexual abuse material', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_csam', + 'unique_id': 'xyz12_block_csam', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_child_sexual_abuse_material-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block child sexual abuse material', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dailymotion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Dailymotion', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dailymotion', + 'unique_id': 'xyz12_block_dailymotion', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dailymotion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Dailymotion', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dailymotion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dating', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_dating', + 'unique_id': 'xyz12_block_dating', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dating', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_discord-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_discord', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Discord', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_discord', + 'unique_id': 'xyz12_block_discord', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_discord-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Discord', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_discord', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_disguised_third_party_trackers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block disguised third-party trackers', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disguised_trackers', + 'unique_id': 'xyz12_block_disguised_trackers', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_disguised_third_party_trackers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block disguised third-party trackers', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_disney_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Disney Plus', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_disneyplus', + 
'unique_id': 'xyz12_block_disneyplus', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_disney_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Disney Plus', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_disney_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_dynamic_dns_hostnames-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block dynamic DNS hostnames', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ddns', + 'unique_id': 'xyz12_block_ddns', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_dynamic_dns_hostnames-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_ebay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block eBay', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_ebay', + 'unique_id': 'xyz12_block_ebay', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_ebay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block eBay', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_ebay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_facebook-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Facebook', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_facebook', + 'unique_id': 'xyz12_block_facebook', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_facebook-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Facebook', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_facebook', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_fortnite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Fortnite', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_fortnite', + 'unique_id': 'xyz12_block_fortnite', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_fortnite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Fortnite', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_fortnite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_gambling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block gambling', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_gambling', + 'unique_id': 'xyz12_block_gambling', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_gambling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block gambling', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_gambling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_google_chat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Google Chat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_google_chat', + 'unique_id': 'xyz12_block_google_chat', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_google_chat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Google Chat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_google_chat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_hbo_max-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block HBO Max', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_hbomax', + 'unique_id': 'xyz12_block_hbomax', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switch[switch.fake_profile_block_hbo_max-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block HBO Max', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hbo_max', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_hulu-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Hulu', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xyz12_block_hulu', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_hulu-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Hulu', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_hulu', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_imgur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Imgur', + 'platform': 'nextdns', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_imgur', + 'unique_id': 'xyz12_block_imgur', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_imgur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Imgur', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_imgur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_instagram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Instagram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_instagram', + 'unique_id': 'xyz12_block_instagram', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_instagram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Instagram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_instagram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_league_of_legends-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block League of Legends', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_leagueoflegends', + 'unique_id': 'xyz12_block_leagueoflegends', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_league_of_legends-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block League of Legends', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_league_of_legends', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_mastodon-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Mastodon', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_mastodon', + 'unique_id': 'xyz12_block_mastodon', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_mastodon-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Mastodon', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_mastodon', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_messenger-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Messenger', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_messenger', + 'unique_id': 'xyz12_block_messenger', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_messenger-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Messenger', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_messenger', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_minecraft-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_minecraft', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Minecraft', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_minecraft', + 'unique_id': 'xyz12_block_minecraft', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_minecraft-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Minecraft', + }), + 'context': , + 
'entity_id': 'switch.fake_profile_block_minecraft', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_netflix-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Netflix', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_netflix', + 'unique_id': 'xyz12_block_netflix', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_netflix-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Netflix', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_netflix', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_newly_registered_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block newly registered domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_nrd', + 
'unique_id': 'xyz12_block_nrd', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_newly_registered_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block newly registered domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_newly_registered_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_online_gaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block online gaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_online_gaming', + 'unique_id': 'xyz12_block_online_gaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_online_gaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block online gaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_online_gaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_page-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_page', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block page', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_page', + 'unique_id': 'xyz12_block_page', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_page-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block page', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_page', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_block_parked_domains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block parked domains', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_parked_domains', + 'unique_id': 'xyz12_block_parked_domains', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_parked_domains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block parked domains', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_parked_domains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_pinterest-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Pinterest', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_pinterest', + 'unique_id': 'xyz12_block_pinterest', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_pinterest-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Pinterest', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_pinterest', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_piracy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_piracy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block piracy', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_piracy', + 'unique_id': 'xyz12_block_piracy', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_piracy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block piracy', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_piracy', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_playstation_network-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block PlayStation Network', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_playstation_network', + 'unique_id': 'xyz12_block_playstation_network', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_playstation_network-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block PlayStation Network', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_playstation_network', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_porn-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_porn', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block porn', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_porn', + 'unique_id': 
'xyz12_block_porn', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_porn-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block porn', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_porn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_prime_video-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Prime Video', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_primevideo', + 'unique_id': 'xyz12_block_primevideo', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_prime_video-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Prime Video', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_prime_video', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_reddit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Reddit', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_reddit', + 'unique_id': 'xyz12_block_reddit', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_reddit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Reddit', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_reddit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_roblox-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Roblox', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_roblox', + 'unique_id': 'xyz12_block_roblox', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_roblox-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Roblox', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_roblox', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_signal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , 
+ 'entity_id': 'switch.fake_profile_block_signal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Signal', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_signal', + 'unique_id': 'xyz12_block_signal', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_signal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Signal', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_signal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_skype-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_skype', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Skype', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_skype', + 'unique_id': 'xyz12_block_skype', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_skype-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Skype', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_skype', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_snapchat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Snapchat', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_snapchat', + 'unique_id': 'xyz12_block_snapchat', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_snapchat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Snapchat', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_snapchat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_social_networks-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block social networks', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_social_networks', + 'unique_id': 'xyz12_block_social_networks', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_social_networks-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block social 
networks', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_social_networks', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_spotify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Spotify', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_spotify', + 'unique_id': 'xyz12_block_spotify', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_spotify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Spotify', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_spotify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_steam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_steam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Steam', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_steam', + 'unique_id': 
'xyz12_block_steam', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_steam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Steam', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_steam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_telegram-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Telegram', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_telegram', + 'unique_id': 'xyz12_block_telegram', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_telegram-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Telegram', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_telegram', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tiktok-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block TikTok', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tiktok', + 'unique_id': 'xyz12_block_tiktok', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tiktok-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block TikTok', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tiktok', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tinder-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tinder', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tinder', + 'unique_id': 'xyz12_block_tinder', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tinder-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tinder', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tinder', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_tumblr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , 
+ 'entity_id': 'switch.fake_profile_block_tumblr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Tumblr', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_tumblr', + 'unique_id': 'xyz12_block_tumblr', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_tumblr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Tumblr', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_tumblr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_twitch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Twitch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitch', + 'unique_id': 'xyz12_block_twitch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_twitch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Twitch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_twitch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_video_streaming-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block video streaming', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_video_streaming', + 'unique_id': 'xyz12_block_video_streaming', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_video_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block video streaming', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_video_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_vimeo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Vimeo', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vimeo', + 'unique_id': 'xyz12_block_vimeo', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_vimeo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 
'Fake Profile Block Vimeo', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vimeo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_vk-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_vk', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block VK', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_vk', + 'unique_id': 'xyz12_block_vk', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_vk-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block VK', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_vk', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_whatsapp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block WhatsApp', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_whatsapp', + 'unique_id': 'xyz12_block_whatsapp', + 
'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_whatsapp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block WhatsApp', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_whatsapp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_x_formerly_twitter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block X (formerly Twitter)', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_twitter', + 'unique_id': 'xyz12_block_twitter', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_x_formerly_twitter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block X (formerly Twitter)', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_xbox_live-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Xbox Live', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_xboxlive', + 'unique_id': 'xyz12_block_xboxlive', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_xbox_live-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Xbox Live', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_xbox_live', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_youtube-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block YouTube', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_youtube', + 'unique_id': 'xyz12_block_youtube', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_youtube-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block YouTube', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_youtube', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_block_zoom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Block Zoom', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_zoom', + 'unique_id': 'xyz12_block_zoom', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_block_zoom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Block Zoom', + }), + 'context': , + 'entity_id': 'switch.fake_profile_block_zoom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_cache_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache boost', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cache_boost', + 'unique_id': 'xyz12_cache_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_cache_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cache boost', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cache_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_switch[switch.fake_profile_cname_flattening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CNAME flattening', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cname_flattening', + 'unique_id': 'xyz12_cname_flattening', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_cname_flattening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile CNAME flattening', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cname_flattening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_cryptojacking_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cryptojacking protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cryptojacking_protection', + 'unique_id': 'xyz12_cryptojacking_protection', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switch[switch.fake_profile_cryptojacking_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Cryptojacking protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_cryptojacking_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_dns_rebinding_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DNS rebinding protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dns_rebinding_protection', + 'unique_id': 'xyz12_dns_rebinding_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_dns_rebinding_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile DNS rebinding protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_dns_rebinding_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_domain_generation_algorithms_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domain generation algorithms protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dga_protection', + 'unique_id': 'xyz12_dga_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_domain_generation_algorithms_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Domain generation algorithms protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_force_safesearch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force SafeSearch', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'safesearch', + 'unique_id': 'xyz12_safesearch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_force_safesearch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force SafeSearch', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_safesearch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_switch[switch.fake_profile_force_youtube_restricted_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force YouTube restricted mode', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'youtube_restricted_mode', + 'unique_id': 'xyz12_youtube_restricted_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_force_youtube_restricted_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Force YouTube restricted mode', + }), + 'context': , + 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_google_safe_browsing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Google safe browsing', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'google_safe_browsing', + 'unique_id': 'xyz12_google_safe_browsing', + 
'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_google_safe_browsing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Google safe browsing', + }), + 'context': , + 'entity_id': 'switch.fake_profile_google_safe_browsing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[switch.fake_profile_idn_homograph_attacks_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IDN homograph attacks protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'idn_homograph_attacks_protection', + 'unique_id': 'xyz12_idn_homograph_attacks_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_idn_homograph_attacks_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile IDN homograph attacks protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_logs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_logs', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Logs', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'logs', + 'unique_id': 'xyz12_logs', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_logs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Logs', + }), + 'context': , + 'entity_id': 'switch.fake_profile_logs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_threat_intelligence_feeds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Threat intelligence feeds', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'threat_intelligence_feeds', + 'unique_id': 'xyz12_threat_intelligence_feeds', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_threat_intelligence_feeds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Threat intelligence feeds', + }), + 'context': , + 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_typosquatting_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ 
+ }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Typosquatting protection', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'typosquatting_protection', + 'unique_id': 'xyz12_typosquatting_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_typosquatting_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Typosquatting protection', + }), + 'context': , + 'entity_id': 'switch.fake_profile_typosquatting_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[switch.fake_profile_web3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.fake_profile_web3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Web3', + 'platform': 'nextdns', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'web3', + 'unique_id': 'xyz12_web3', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[switch.fake_profile_web3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Fake Profile Web3', + }), + 'context': , + 'entity_id': 
'switch.fake_profile_web3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextdns/test_binary_sensor.py b/tests/components/nextdns/test_binary_sensor.py index b69db4798d3..19cad755fb4 100644 --- a/tests/components/nextdns/test_binary_sensor.py +++ b/tests/components/nextdns/test_binary_sensor.py @@ -4,42 +4,26 @@ from datetime import timedelta from unittest.mock import patch from nextdns import ApiError +from syrupy import SnapshotAssertion -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow from . import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform -async def test_binary_Sensor(hass: HomeAssistant) -> None: +async def test_binary_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the binary sensors.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BINARY_SENSOR]): + entry = await init_integration(hass) - await init_integration(hass) - - state = hass.states.get("binary_sensor.fake_profile_device_connection_status") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("binary_sensor.fake_profile_device_connection_status") - assert entry - assert entry.unique_id == "xyz12_this_device_nextdns_connection_status" - - state = hass.states.get( - "binary_sensor.fake_profile_device_profile_connection_status" - ) - assert state - assert state.state == STATE_OFF - - entry = registry.async_get( - "binary_sensor.fake_profile_device_profile_connection_status" - ) - assert entry - assert entry.unique_id == 
"xyz12_this_device_profile_connection_status" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_button.py b/tests/components/nextdns/test_button.py index b5f7b01aee2..51970b9bb48 100644 --- a/tests/components/nextdns/test_button.py +++ b/tests/components/nextdns/test_button.py @@ -2,28 +2,27 @@ from unittest.mock import patch +from syrupy import SnapshotAssertion + from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util from . import init_integration +from tests.common import snapshot_platform -async def test_button(hass: HomeAssistant) -> None: + +async def test_button( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test states of the button.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.BUTTON]): + entry = await init_integration(hass) - await init_integration(hass) - - state = hass.states.get("button.fake_profile_clear_logs") - assert state - assert state.state == STATE_UNKNOWN - - entry = registry.async_get("button.fake_profile_clear_logs") - assert entry - assert entry.unique_id == "xyz12_clear_logs" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_button_press(hass: HomeAssistant) -> None: diff --git a/tests/components/nextdns/test_sensor.py b/tests/components/nextdns/test_sensor.py index 951d220eccb..e7ea7a3f56b 100644 --- a/tests/components/nextdns/test_sensor.py +++ b/tests/components/nextdns/test_sensor.py @@ -4,283 +4,37 @@ from datetime import timedelta from unittest.mock import patch from nextdns import 
ApiError +from syrupy import SnapshotAssertion -from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass -from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow from . import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_sensor( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test states of sensors.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass) - await init_integration(hass) - - state = hass.states.get("sensor.fake_profile_dns_queries") - assert state - assert state.state == "100" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries") - assert entry - assert entry.unique_id == "xyz12_all_queries" - - state = hass.states.get("sensor.fake_profile_dns_queries_blocked") - assert state - assert state.state == "20" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries_blocked") - assert entry - assert entry.unique_id == "xyz12_blocked_queries" - - state = hass.states.get("sensor.fake_profile_dns_queries_blocked_ratio") - assert state - assert state.state == "20.0" - assert state.attributes.get(ATTR_STATE_CLASS) is 
SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_queries_blocked_ratio") - assert entry - assert entry.unique_id == "xyz12_blocked_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_queries_relayed") - assert state - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_queries_relayed") - assert entry - assert entry.unique_id == "xyz12_relayed_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_https_queries") - assert state - assert state.state == "20" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_https_queries") - assert entry - assert entry.unique_id == "xyz12_doh_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_https_queries_ratio") - assert state - assert state.state == "17.4" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_https_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doh_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_over_http_3_queries") - assert state - assert state.state == "15" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_http_3_queries") - assert entry - assert entry.unique_id == "xyz12_doh3_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_http_3_queries_ratio") - assert state - assert 
state.state == "13.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_http_3_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doh3_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_over_quic_queries") - assert state - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_quic_queries") - assert entry - assert entry.unique_id == "xyz12_doq_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_quic_queries_ratio") - assert state - assert state.state == "8.7" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_quic_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_doq_queries_ratio" - - state = hass.states.get("sensor.fake_profile_dns_over_tls_queries") - assert state - assert state.state == "30" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dns_over_tls_queries") - assert entry - assert entry.unique_id == "xyz12_dot_queries" - - state = hass.states.get("sensor.fake_profile_dns_over_tls_queries_ratio") - assert state - assert state.state == "26.1" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dns_over_tls_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_dot_queries_ratio" - - state = 
hass.states.get("sensor.fake_profile_dnssec_not_validated_queries") - assert state - assert state.state == "25" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dnssec_not_validated_queries") - assert entry - assert entry.unique_id == "xyz12_not_validated_queries" - - state = hass.states.get("sensor.fake_profile_dnssec_validated_queries") - assert state - assert state.state == "75" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_dnssec_validated_queries") - assert entry - assert entry.unique_id == "xyz12_validated_queries" - - state = hass.states.get("sensor.fake_profile_dnssec_validated_queries_ratio") - assert state - assert state.state == "75.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_dnssec_validated_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_validated_queries_ratio" - - state = hass.states.get("sensor.fake_profile_encrypted_queries") - assert state - assert state.state == "60" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_encrypted_queries") - assert entry - assert entry.unique_id == "xyz12_encrypted_queries" - - state = hass.states.get("sensor.fake_profile_unencrypted_queries") - assert state - assert state.state == "40" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_unencrypted_queries") - assert 
entry - assert entry.unique_id == "xyz12_unencrypted_queries" - - state = hass.states.get("sensor.fake_profile_encrypted_queries_ratio") - assert state - assert state.state == "60.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_encrypted_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_encrypted_queries_ratio" - - state = hass.states.get("sensor.fake_profile_ipv4_queries") - assert state - assert state.state == "90" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_ipv4_queries") - assert entry - assert entry.unique_id == "xyz12_ipv4_queries" - - state = hass.states.get("sensor.fake_profile_ipv6_queries") - assert state - assert state.state == "10" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_ipv6_queries") - assert entry - assert entry.unique_id == "xyz12_ipv6_queries" - - state = hass.states.get("sensor.fake_profile_ipv6_queries_ratio") - assert state - assert state.state == "10.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_ipv6_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_ipv6_queries_ratio" - - state = hass.states.get("sensor.fake_profile_tcp_queries") - assert state - assert state.state == "0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_tcp_queries") - assert entry - assert 
entry.unique_id == "xyz12_tcp_queries" - - state = hass.states.get("sensor.fake_profile_tcp_queries_ratio") - assert state - assert state.state == "0.0" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_tcp_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_tcp_queries_ratio" - - state = hass.states.get("sensor.fake_profile_udp_queries") - assert state - assert state.state == "40" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "queries" - - entry = registry.async_get("sensor.fake_profile_udp_queries") - assert entry - assert entry.unique_id == "xyz12_udp_queries" - - state = hass.states.get("sensor.fake_profile_udp_queries_ratio") - assert state - assert state.state == "34.8" - assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - - entry = registry.async_get("sensor.fake_profile_udp_queries_ratio") - assert entry - assert entry.unique_id == "xyz12_udp_queries_ratio" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_availability( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, ) -> None: """Ensure that we mark the entities unavailable correctly when service causes an error.""" - er.async_get(hass) - await init_integration(hass) state = hass.states.get("sensor.fake_profile_dns_queries") diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index a9dd0ba5cbd..2936bad1c67 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -7,6 +7,7 @@ from aiohttp import ClientError from 
aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( @@ -16,6 +17,7 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -24,606 +26,20 @@ from homeassistant.util.dt import utcnow from . import init_integration, mock_nextdns -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_switch( - hass: HomeAssistant, entity_registry_enabled_by_default: None + hass: HomeAssistant, + entity_registry_enabled_by_default: None, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test states of the switches.""" - registry = er.async_get(hass) + with patch("homeassistant.components.nextdns.PLATFORMS", [Platform.SWITCH]): + entry = await init_integration(hass) - await init_integration(hass) - - state = hass.states.get("switch.fake_profile_ai_driven_threat_detection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_ai_driven_threat_detection") - assert entry - assert entry.unique_id == "xyz12_ai_threat_detection" - - state = hass.states.get("switch.fake_profile_allow_affiliate_tracking_links") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_allow_affiliate_tracking_links") - assert entry - assert entry.unique_id == "xyz12_allow_affiliate" - - state = hass.states.get("switch.fake_profile_anonymized_edns_client_subnet") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_anonymized_edns_client_subnet") - assert entry - assert entry.unique_id == "xyz12_anonymized_ecs" - - state = 
hass.states.get("switch.fake_profile_block_bypass_methods") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_bypass_methods") - assert entry - assert entry.unique_id == "xyz12_block_bypass_methods" - - state = hass.states.get("switch.fake_profile_block_child_sexual_abuse_material") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_child_sexual_abuse_material") - assert entry - assert entry.unique_id == "xyz12_block_csam" - - state = hass.states.get("switch.fake_profile_block_disguised_third_party_trackers") - assert state - assert state.state == STATE_ON - - entry = registry.async_get( - "switch.fake_profile_block_disguised_third_party_trackers" - ) - assert entry - assert entry.unique_id == "xyz12_block_disguised_trackers" - - state = hass.states.get("switch.fake_profile_block_dynamic_dns_hostnames") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dynamic_dns_hostnames") - assert entry - assert entry.unique_id == "xyz12_block_ddns" - - state = hass.states.get("switch.fake_profile_block_newly_registered_domains") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_newly_registered_domains") - assert entry - assert entry.unique_id == "xyz12_block_nrd" - - state = hass.states.get("switch.fake_profile_block_page") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_block_page") - assert entry - assert entry.unique_id == "xyz12_block_page" - - state = hass.states.get("switch.fake_profile_block_parked_domains") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_parked_domains") - assert entry - assert entry.unique_id == "xyz12_block_parked_domains" - - state = hass.states.get("switch.fake_profile_cname_flattening") - assert state - assert 
state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cname_flattening") - assert entry - assert entry.unique_id == "xyz12_cname_flattening" - - state = hass.states.get("switch.fake_profile_cache_boost") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cache_boost") - assert entry - assert entry.unique_id == "xyz12_cache_boost" - - state = hass.states.get("switch.fake_profile_cryptojacking_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_cryptojacking_protection") - assert entry - assert entry.unique_id == "xyz12_cryptojacking_protection" - - state = hass.states.get("switch.fake_profile_dns_rebinding_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_dns_rebinding_protection") - assert entry - assert entry.unique_id == "xyz12_dns_rebinding_protection" - - state = hass.states.get( - "switch.fake_profile_domain_generation_algorithms_protection" - ) - assert state - assert state.state == STATE_ON - - entry = registry.async_get( - "switch.fake_profile_domain_generation_algorithms_protection" - ) - assert entry - assert entry.unique_id == "xyz12_dga_protection" - - state = hass.states.get("switch.fake_profile_force_safesearch") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_force_safesearch") - assert entry - assert entry.unique_id == "xyz12_safesearch" - - state = hass.states.get("switch.fake_profile_force_youtube_restricted_mode") - assert state - assert state.state == STATE_OFF - - entry = registry.async_get("switch.fake_profile_force_youtube_restricted_mode") - assert entry - assert entry.unique_id == "xyz12_youtube_restricted_mode" - - state = hass.states.get("switch.fake_profile_google_safe_browsing") - assert state - assert state.state == STATE_OFF - - entry = 
registry.async_get("switch.fake_profile_google_safe_browsing") - assert entry - assert entry.unique_id == "xyz12_google_safe_browsing" - - state = hass.states.get("switch.fake_profile_idn_homograph_attacks_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_idn_homograph_attacks_protection") - assert entry - assert entry.unique_id == "xyz12_idn_homograph_attacks_protection" - - state = hass.states.get("switch.fake_profile_logs") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_logs") - assert entry - assert entry.unique_id == "xyz12_logs" - - state = hass.states.get("switch.fake_profile_threat_intelligence_feeds") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_threat_intelligence_feeds") - assert entry - assert entry.unique_id == "xyz12_threat_intelligence_feeds" - - state = hass.states.get("switch.fake_profile_typosquatting_protection") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_typosquatting_protection") - assert entry - assert entry.unique_id == "xyz12_typosquatting_protection" - - state = hass.states.get("switch.fake_profile_web3") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_web3") - assert entry - assert entry.unique_id == "xyz12_web3" - - state = hass.states.get("switch.fake_profile_block_9gag") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_9gag") - assert entry - assert entry.unique_id == "xyz12_block_9gag" - - state = hass.states.get("switch.fake_profile_block_amazon") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_amazon") - assert entry - assert entry.unique_id == "xyz12_block_amazon" - - state = hass.states.get("switch.fake_profile_block_bereal") - assert state - 
assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_bereal") - assert entry - assert entry.unique_id == "xyz12_block_bereal" - - state = hass.states.get("switch.fake_profile_block_blizzard") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_blizzard") - assert entry - assert entry.unique_id == "xyz12_block_blizzard" - - state = hass.states.get("switch.fake_profile_block_chatgpt") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_chatgpt") - assert entry - assert entry.unique_id == "xyz12_block_chatgpt" - - state = hass.states.get("switch.fake_profile_block_dailymotion") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dailymotion") - assert entry - assert entry.unique_id == "xyz12_block_dailymotion" - - state = hass.states.get("switch.fake_profile_block_discord") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_discord") - assert entry - assert entry.unique_id == "xyz12_block_discord" - - state = hass.states.get("switch.fake_profile_block_disney_plus") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_disney_plus") - assert entry - assert entry.unique_id == "xyz12_block_disneyplus" - - state = hass.states.get("switch.fake_profile_block_ebay") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_ebay") - assert entry - assert entry.unique_id == "xyz12_block_ebay" - - state = hass.states.get("switch.fake_profile_block_facebook") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_facebook") - assert entry - assert entry.unique_id == "xyz12_block_facebook" - - state = hass.states.get("switch.fake_profile_block_fortnite") - assert state - assert 
state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_fortnite") - assert entry - assert entry.unique_id == "xyz12_block_fortnite" - - state = hass.states.get("switch.fake_profile_block_google_chat") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_google_chat") - assert entry - assert entry.unique_id == "xyz12_block_google_chat" - - state = hass.states.get("switch.fake_profile_block_hbo_max") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_hbo_max") - assert entry - assert entry.unique_id == "xyz12_block_hbomax" - - state = hass.states.get("switch.fake_profile_block_hulu") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_hulu") - assert entry - assert entry.unique_id == "xyz12_block_hulu" - - state = hass.states.get("switch.fake_profile_block_imgur") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_imgur") - assert entry - assert entry.unique_id == "xyz12_block_imgur" - - state = hass.states.get("switch.fake_profile_block_instagram") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_instagram") - assert entry - assert entry.unique_id == "xyz12_block_instagram" - - state = hass.states.get("switch.fake_profile_block_league_of_legends") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_league_of_legends") - assert entry - assert entry.unique_id == "xyz12_block_leagueoflegends" - - state = hass.states.get("switch.fake_profile_block_mastodon") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_mastodon") - assert entry - assert entry.unique_id == "xyz12_block_mastodon" - - state = hass.states.get("switch.fake_profile_block_messenger") - assert state - 
assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_messenger") - assert entry - assert entry.unique_id == "xyz12_block_messenger" - - state = hass.states.get("switch.fake_profile_block_minecraft") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_minecraft") - assert entry - assert entry.unique_id == "xyz12_block_minecraft" - - state = hass.states.get("switch.fake_profile_block_netflix") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_netflix") - assert entry - assert entry.unique_id == "xyz12_block_netflix" - - state = hass.states.get("switch.fake_profile_block_pinterest") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_pinterest") - assert entry - assert entry.unique_id == "xyz12_block_pinterest" - - state = hass.states.get("switch.fake_profile_block_playstation_network") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_playstation_network") - assert entry - assert entry.unique_id == "xyz12_block_playstation_network" - - state = hass.states.get("switch.fake_profile_block_prime_video") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_prime_video") - assert entry - assert entry.unique_id == "xyz12_block_primevideo" - - state = hass.states.get("switch.fake_profile_block_reddit") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_reddit") - assert entry - assert entry.unique_id == "xyz12_block_reddit" - - state = hass.states.get("switch.fake_profile_block_roblox") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_roblox") - assert entry - assert entry.unique_id == "xyz12_block_roblox" - - state = 
hass.states.get("switch.fake_profile_block_signal") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_signal") - assert entry - assert entry.unique_id == "xyz12_block_signal" - - state = hass.states.get("switch.fake_profile_block_skype") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_skype") - assert entry - assert entry.unique_id == "xyz12_block_skype" - - state = hass.states.get("switch.fake_profile_block_snapchat") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_snapchat") - assert entry - assert entry.unique_id == "xyz12_block_snapchat" - - state = hass.states.get("switch.fake_profile_block_spotify") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_spotify") - assert entry - assert entry.unique_id == "xyz12_block_spotify" - - state = hass.states.get("switch.fake_profile_block_steam") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_steam") - assert entry - assert entry.unique_id == "xyz12_block_steam" - - state = hass.states.get("switch.fake_profile_block_telegram") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_telegram") - assert entry - assert entry.unique_id == "xyz12_block_telegram" - - state = hass.states.get("switch.fake_profile_block_tiktok") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tiktok") - assert entry - assert entry.unique_id == "xyz12_block_tiktok" - - state = hass.states.get("switch.fake_profile_block_tinder") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tinder") - assert entry - assert entry.unique_id == "xyz12_block_tinder" - - state = 
hass.states.get("switch.fake_profile_block_tumblr") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_tumblr") - assert entry - assert entry.unique_id == "xyz12_block_tumblr" - - state = hass.states.get("switch.fake_profile_block_twitch") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_twitch") - assert entry - assert entry.unique_id == "xyz12_block_twitch" - - state = hass.states.get("switch.fake_profile_block_x_formerly_twitter") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_x_formerly_twitter") - assert entry - assert entry.unique_id == "xyz12_block_twitter" - - state = hass.states.get("switch.fake_profile_block_vimeo") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_vimeo") - assert entry - assert entry.unique_id == "xyz12_block_vimeo" - - state = hass.states.get("switch.fake_profile_block_vk") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_vk") - assert entry - assert entry.unique_id == "xyz12_block_vk" - - state = hass.states.get("switch.fake_profile_block_whatsapp") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_whatsapp") - assert entry - assert entry.unique_id == "xyz12_block_whatsapp" - - state = hass.states.get("switch.fake_profile_block_xbox_live") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_xbox_live") - assert entry - assert entry.unique_id == "xyz12_block_xboxlive" - - state = hass.states.get("switch.fake_profile_block_youtube") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_youtube") - assert entry - assert entry.unique_id == "xyz12_block_youtube" - - state = 
hass.states.get("switch.fake_profile_block_zoom") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_zoom") - assert entry - assert entry.unique_id == "xyz12_block_zoom" - - state = hass.states.get("switch.fake_profile_block_dating") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_dating") - assert entry - assert entry.unique_id == "xyz12_block_dating" - - state = hass.states.get("switch.fake_profile_block_gambling") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_gambling") - assert entry - assert entry.unique_id == "xyz12_block_gambling" - - state = hass.states.get("switch.fake_profile_block_online_gaming") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_online_gaming") - assert entry - assert entry.unique_id == "xyz12_block_online_gaming" - - state = hass.states.get("switch.fake_profile_block_piracy") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_piracy") - assert entry - assert entry.unique_id == "xyz12_block_piracy" - - state = hass.states.get("switch.fake_profile_block_porn") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_porn") - assert entry - assert entry.unique_id == "xyz12_block_porn" - - state = hass.states.get("switch.fake_profile_block_social_networks") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_social_networks") - assert entry - assert entry.unique_id == "xyz12_block_social_networks" - - state = hass.states.get("switch.fake_profile_block_video_streaming") - assert state - assert state.state == STATE_ON - - entry = registry.async_get("switch.fake_profile_block_video_streaming") - assert entry - assert entry.unique_id == 
"xyz12_block_video_streaming" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_switch_on(hass: HomeAssistant) -> None: diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index 7575f1edb29..00899e745b9 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import aiohttp from homeassistant import config_entries -from homeassistant.components.ovo_energy.const import DOMAIN +from homeassistant.components.ovo_energy.const import CONF_ACCOUNT, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -13,7 +13,11 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry FIXTURE_REAUTH_INPUT = {CONF_PASSWORD: "something1"} -FIXTURE_USER_INPUT = {CONF_USERNAME: "example@example.com", CONF_PASSWORD: "something"} +FIXTURE_USER_INPUT = { + CONF_USERNAME: "example@example.com", + CONF_PASSWORD: "something", + CONF_ACCOUNT: "123456", +} UNIQUE_ID = "example@example.com" @@ -37,9 +41,14 @@ async def test_authorization_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - with patch( - "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", - return_value=False, + with ( + patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", + return_value=False, + ), + patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.bootstrap_accounts", + ), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -88,6 +97,9 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=True, ), + 
patch( + "homeassistant.components.ovo_energy.config_flow.OVOEnergy.bootstrap_accounts", + ), patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.username", "some_name", diff --git a/tests/components/plex/conftest.py b/tests/components/plex/conftest.py index 7e82b1c9d26..d00b8eb944b 100644 --- a/tests/components/plex/conftest.py +++ b/tests/components/plex/conftest.py @@ -29,253 +29,253 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: yield mock_setup_entry -@pytest.fixture(name="album", scope="session") +@pytest.fixture(name="album", scope="package") def album_fixture(): """Load album payload and return it.""" return load_fixture("plex/album.xml") -@pytest.fixture(name="artist_albums", scope="session") +@pytest.fixture(name="artist_albums", scope="package") def artist_albums_fixture(): """Load artist's albums payload and return it.""" return load_fixture("plex/artist_albums.xml") -@pytest.fixture(name="children_20", scope="session") +@pytest.fixture(name="children_20", scope="package") def children_20_fixture(): """Load children payload for item 20 and return it.""" return load_fixture("plex/children_20.xml") -@pytest.fixture(name="children_30", scope="session") +@pytest.fixture(name="children_30", scope="package") def children_30_fixture(): """Load children payload for item 30 and return it.""" return load_fixture("plex/children_30.xml") -@pytest.fixture(name="children_200", scope="session") +@pytest.fixture(name="children_200", scope="package") def children_200_fixture(): """Load children payload for item 200 and return it.""" return load_fixture("plex/children_200.xml") -@pytest.fixture(name="children_300", scope="session") +@pytest.fixture(name="children_300", scope="package") def children_300_fixture(): """Load children payload for item 300 and return it.""" return load_fixture("plex/children_300.xml") -@pytest.fixture(name="empty_library", scope="session") +@pytest.fixture(name="empty_library", scope="package") def 
empty_library_fixture(): """Load an empty library payload and return it.""" return load_fixture("plex/empty_library.xml") -@pytest.fixture(name="empty_payload", scope="session") +@pytest.fixture(name="empty_payload", scope="package") def empty_payload_fixture(): """Load an empty payload and return it.""" return load_fixture("plex/empty_payload.xml") -@pytest.fixture(name="grandchildren_300", scope="session") +@pytest.fixture(name="grandchildren_300", scope="package") def grandchildren_300_fixture(): """Load grandchildren payload for item 300 and return it.""" return load_fixture("plex/grandchildren_300.xml") -@pytest.fixture(name="library_movies_all", scope="session") +@pytest.fixture(name="library_movies_all", scope="package") def library_movies_all_fixture(): """Load payload for all items in the movies library and return it.""" return load_fixture("plex/library_movies_all.xml") -@pytest.fixture(name="library_movies_metadata", scope="session") +@pytest.fixture(name="library_movies_metadata", scope="package") def library_movies_metadata_fixture(): """Load payload for metadata in the movies library and return it.""" return load_fixture("plex/library_movies_metadata.xml") -@pytest.fixture(name="library_movies_collections", scope="session") +@pytest.fixture(name="library_movies_collections", scope="package") def library_movies_collections_fixture(): """Load payload for collections in the movies library and return it.""" return load_fixture("plex/library_movies_collections.xml") -@pytest.fixture(name="library_tvshows_all", scope="session") +@pytest.fixture(name="library_tvshows_all", scope="package") def library_tvshows_all_fixture(): """Load payload for all items in the tvshows library and return it.""" return load_fixture("plex/library_tvshows_all.xml") -@pytest.fixture(name="library_tvshows_metadata", scope="session") +@pytest.fixture(name="library_tvshows_metadata", scope="package") def library_tvshows_metadata_fixture(): """Load payload for metadata in the TV 
shows library and return it.""" return load_fixture("plex/library_tvshows_metadata.xml") -@pytest.fixture(name="library_tvshows_collections", scope="session") +@pytest.fixture(name="library_tvshows_collections", scope="package") def library_tvshows_collections_fixture(): """Load payload for collections in the TV shows library and return it.""" return load_fixture("plex/library_tvshows_collections.xml") -@pytest.fixture(name="library_music_all", scope="session") +@pytest.fixture(name="library_music_all", scope="package") def library_music_all_fixture(): """Load payload for all items in the music library and return it.""" return load_fixture("plex/library_music_all.xml") -@pytest.fixture(name="library_music_metadata", scope="session") +@pytest.fixture(name="library_music_metadata", scope="package") def library_music_metadata_fixture(): """Load payload for metadata in the music library and return it.""" return load_fixture("plex/library_music_metadata.xml") -@pytest.fixture(name="library_music_collections", scope="session") +@pytest.fixture(name="library_music_collections", scope="package") def library_music_collections_fixture(): """Load payload for collections in the music library and return it.""" return load_fixture("plex/library_music_collections.xml") -@pytest.fixture(name="library_movies_sort", scope="session") +@pytest.fixture(name="library_movies_sort", scope="package") def library_movies_sort_fixture(): """Load sorting payload for movie library and return it.""" return load_fixture("plex/library_movies_sort.xml") -@pytest.fixture(name="library_tvshows_sort", scope="session") +@pytest.fixture(name="library_tvshows_sort", scope="package") def library_tvshows_sort_fixture(): """Load sorting payload for tvshow library and return it.""" return load_fixture("plex/library_tvshows_sort.xml") -@pytest.fixture(name="library_music_sort", scope="session") +@pytest.fixture(name="library_music_sort", scope="package") def library_music_sort_fixture(): """Load sorting 
payload for music library and return it.""" return load_fixture("plex/library_music_sort.xml") -@pytest.fixture(name="library_movies_filtertypes", scope="session") +@pytest.fixture(name="library_movies_filtertypes", scope="package") def library_movies_filtertypes_fixture(): """Load filtertypes payload for movie library and return it.""" return load_fixture("plex/library_movies_filtertypes.xml") -@pytest.fixture(name="library", scope="session") +@pytest.fixture(name="library", scope="package") def library_fixture(): """Load library payload and return it.""" return load_fixture("plex/library.xml") -@pytest.fixture(name="library_movies_size", scope="session") +@pytest.fixture(name="library_movies_size", scope="package") def library_movies_size_fixture(): """Load movie library size payload and return it.""" return load_fixture("plex/library_movies_size.xml") -@pytest.fixture(name="library_music_size", scope="session") +@pytest.fixture(name="library_music_size", scope="package") def library_music_size_fixture(): """Load music library size payload and return it.""" return load_fixture("plex/library_music_size.xml") -@pytest.fixture(name="library_tvshows_size", scope="session") +@pytest.fixture(name="library_tvshows_size", scope="package") def library_tvshows_size_fixture(): """Load tvshow library size payload and return it.""" return load_fixture("plex/library_tvshows_size.xml") -@pytest.fixture(name="library_tvshows_size_episodes", scope="session") +@pytest.fixture(name="library_tvshows_size_episodes", scope="package") def library_tvshows_size_episodes_fixture(): """Load tvshow library size in episodes payload and return it.""" return load_fixture("plex/library_tvshows_size_episodes.xml") -@pytest.fixture(name="library_tvshows_size_seasons", scope="session") +@pytest.fixture(name="library_tvshows_size_seasons", scope="package") def library_tvshows_size_seasons_fixture(): """Load tvshow library size in seasons payload and return it.""" return 
load_fixture("plex/library_tvshows_size_seasons.xml") -@pytest.fixture(name="library_sections", scope="session") +@pytest.fixture(name="library_sections", scope="package") def library_sections_fixture(): """Load library sections payload and return it.""" return load_fixture("plex/library_sections.xml") -@pytest.fixture(name="media_1", scope="session") +@pytest.fixture(name="media_1", scope="package") def media_1_fixture(): """Load media payload for item 1 and return it.""" return load_fixture("plex/media_1.xml") -@pytest.fixture(name="media_30", scope="session") +@pytest.fixture(name="media_30", scope="package") def media_30_fixture(): """Load media payload for item 30 and return it.""" return load_fixture("plex/media_30.xml") -@pytest.fixture(name="media_100", scope="session") +@pytest.fixture(name="media_100", scope="package") def media_100_fixture(): """Load media payload for item 100 and return it.""" return load_fixture("plex/media_100.xml") -@pytest.fixture(name="media_200", scope="session") +@pytest.fixture(name="media_200", scope="package") def media_200_fixture(): """Load media payload for item 200 and return it.""" return load_fixture("plex/media_200.xml") -@pytest.fixture(name="player_plexweb_resources", scope="session") +@pytest.fixture(name="player_plexweb_resources", scope="package") def player_plexweb_resources_fixture(): """Load resources payload for a Plex Web player and return it.""" return load_fixture("plex/player_plexweb_resources.xml") -@pytest.fixture(name="player_plexhtpc_resources", scope="session") +@pytest.fixture(name="player_plexhtpc_resources", scope="package") def player_plexhtpc_resources_fixture(): """Load resources payload for a Plex HTPC player and return it.""" return load_fixture("plex/player_plexhtpc_resources.xml") -@pytest.fixture(name="playlists", scope="session") +@pytest.fixture(name="playlists", scope="package") def playlists_fixture(): """Load payload for all playlists and return it.""" return 
load_fixture("plex/playlists.xml") -@pytest.fixture(name="playlist_500", scope="session") +@pytest.fixture(name="playlist_500", scope="package") def playlist_500_fixture(): """Load payload for playlist 500 and return it.""" return load_fixture("plex/playlist_500.xml") -@pytest.fixture(name="playqueue_created", scope="session") +@pytest.fixture(name="playqueue_created", scope="package") def playqueue_created_fixture(): """Load payload for playqueue creation response and return it.""" return load_fixture("plex/playqueue_created.xml") -@pytest.fixture(name="playqueue_1234", scope="session") +@pytest.fixture(name="playqueue_1234", scope="package") def playqueue_1234_fixture(): """Load payload for playqueue 1234 and return it.""" return load_fixture("plex/playqueue_1234.xml") -@pytest.fixture(name="plex_server_accounts", scope="session") +@pytest.fixture(name="plex_server_accounts", scope="package") def plex_server_accounts_fixture(): """Load payload accounts on the Plex server and return it.""" return load_fixture("plex/plex_server_accounts.xml") -@pytest.fixture(name="plex_server_base", scope="session") +@pytest.fixture(name="plex_server_base", scope="package") def plex_server_base_fixture(): """Load base payload for Plex server info and return it.""" return load_fixture("plex/plex_server_base.xml") -@pytest.fixture(name="plex_server_default", scope="session") +@pytest.fixture(name="plex_server_default", scope="package") def plex_server_default_fixture(plex_server_base): """Load default payload for Plex server info and return it.""" return plex_server_base.format( @@ -283,133 +283,133 @@ def plex_server_default_fixture(plex_server_base): ) -@pytest.fixture(name="plex_server_clients", scope="session") +@pytest.fixture(name="plex_server_clients", scope="package") def plex_server_clients_fixture(): """Load available clients payload for Plex server and return it.""" return load_fixture("plex/plex_server_clients.xml") -@pytest.fixture(name="plextv_account", 
scope="session") +@pytest.fixture(name="plextv_account", scope="package") def plextv_account_fixture(): """Load account info from plex.tv and return it.""" return load_fixture("plex/plextv_account.xml") -@pytest.fixture(name="plextv_resources", scope="session") +@pytest.fixture(name="plextv_resources", scope="package") def plextv_resources_fixture(): """Load single-server payload for plex.tv resources and return it.""" return load_fixture("plex/plextv_resources_one_server.xml") -@pytest.fixture(name="plextv_resources_two_servers", scope="session") +@pytest.fixture(name="plextv_resources_two_servers", scope="package") def plextv_resources_two_servers_fixture(): """Load two-server payload for plex.tv resources and return it.""" return load_fixture("plex/plextv_resources_two_servers.xml") -@pytest.fixture(name="plextv_shared_users", scope="session") +@pytest.fixture(name="plextv_shared_users", scope="package") def plextv_shared_users_fixture(): """Load payload for plex.tv shared users and return it.""" return load_fixture("plex/plextv_shared_users.xml") -@pytest.fixture(name="session_base", scope="session") +@pytest.fixture(name="session_base", scope="package") def session_base_fixture(): """Load the base session payload and return it.""" return load_fixture("plex/session_base.xml") -@pytest.fixture(name="session_default", scope="session") +@pytest.fixture(name="session_default", scope="package") def session_default_fixture(session_base): """Load the default session payload and return it.""" return session_base.format(user_id=1) -@pytest.fixture(name="session_new_user", scope="session") +@pytest.fixture(name="session_new_user", scope="package") def session_new_user_fixture(session_base): """Load the new user session payload and return it.""" return session_base.format(user_id=1001) -@pytest.fixture(name="session_photo", scope="session") +@pytest.fixture(name="session_photo", scope="package") def session_photo_fixture(): """Load a photo session payload and return 
it.""" return load_fixture("plex/session_photo.xml") -@pytest.fixture(name="session_plexweb", scope="session") +@pytest.fixture(name="session_plexweb", scope="package") def session_plexweb_fixture(): """Load a Plex Web session payload and return it.""" return load_fixture("plex/session_plexweb.xml") -@pytest.fixture(name="session_transient", scope="session") +@pytest.fixture(name="session_transient", scope="package") def session_transient_fixture(): """Load a transient session payload and return it.""" return load_fixture("plex/session_transient.xml") -@pytest.fixture(name="session_unknown", scope="session") +@pytest.fixture(name="session_unknown", scope="package") def session_unknown_fixture(): """Load a hypothetical unknown session payload and return it.""" return load_fixture("plex/session_unknown.xml") -@pytest.fixture(name="session_live_tv", scope="session") +@pytest.fixture(name="session_live_tv", scope="package") def session_live_tv_fixture(): """Load a Live TV session payload and return it.""" return load_fixture("plex/session_live_tv.xml") -@pytest.fixture(name="livetv_sessions", scope="session") +@pytest.fixture(name="livetv_sessions", scope="package") def livetv_sessions_fixture(): """Load livetv/sessions payload and return it.""" return load_fixture("plex/livetv_sessions.xml") -@pytest.fixture(name="security_token", scope="session") +@pytest.fixture(name="security_token", scope="package") def security_token_fixture(): """Load a security token payload and return it.""" return load_fixture("plex/security_token.xml") -@pytest.fixture(name="show_seasons", scope="session") +@pytest.fixture(name="show_seasons", scope="package") def show_seasons_fixture(): """Load a show's seasons payload and return it.""" return load_fixture("plex/show_seasons.xml") -@pytest.fixture(name="sonos_resources", scope="session") +@pytest.fixture(name="sonos_resources", scope="package") def sonos_resources_fixture(): """Load Sonos resources payload and return it.""" return 
load_fixture("plex/sonos_resources.xml") -@pytest.fixture(name="hubs", scope="session") +@pytest.fixture(name="hubs", scope="package") def hubs_fixture(): """Load hubs resource payload and return it.""" return load_fixture("plex/hubs.xml") -@pytest.fixture(name="hubs_music_library", scope="session") +@pytest.fixture(name="hubs_music_library", scope="package") def hubs_music_library_fixture(): """Load music library hubs resource payload and return it.""" return load_fixture("plex/hubs_library_section.xml") -@pytest.fixture(name="update_check_nochange", scope="session") +@pytest.fixture(name="update_check_nochange", scope="package") def update_check_fixture_nochange() -> str: """Load a no-change update resource payload and return it.""" return load_fixture("plex/release_nochange.xml") -@pytest.fixture(name="update_check_new", scope="session") +@pytest.fixture(name="update_check_new", scope="package") def update_check_fixture_new() -> str: """Load a changed update resource payload and return it.""" return load_fixture("plex/release_new.xml") -@pytest.fixture(name="update_check_new_not_updatable", scope="session") +@pytest.fixture(name="update_check_new_not_updatable", scope="package") def update_check_fixture_new_not_updatable() -> str: """Load a changed update resource payload (not updatable) and return it.""" return load_fixture("plex/release_new_not_updatable.xml") diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index d655f95c79b..d496edb4149 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": true, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git 
a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 92c95f6c5a9..ef7af8a362b 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": false, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index be400b9bc98..8f2e6a75f3f 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -25,7 +25,7 @@ "dhw_state": false, "flame_state": false, "heating_state": false, - "slave_boiler_state": false + "secondary_boiler_state": false }, "dev_class": "heater_central", "location": "a57efe5f145f498c9be62a9b63626fbf", diff --git a/tests/components/plugwise/fixtures/stretch_v31/all_data.json b/tests/components/plugwise/fixtures/stretch_v31/all_data.json index f42cde65b39..a875324fc13 100644 --- a/tests/components/plugwise/fixtures/stretch_v31/all_data.json +++ b/tests/components/plugwise/fixtures/stretch_v31/all_data.json @@ -136,7 +136,6 @@ "gateway": { "gateway_id": "0000aaaa0000aaaa0000aaaa0000aa00", "item_count": 83, - "notifications": {}, "smile_name": "Stretch" } } diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 4eb0b2cb56a..b206b36be89 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -12,9 +12,8 @@ from plugwise.exceptions import ( import pytest from homeassistant.components.plugwise.const import DOMAIN -from homeassistant.components.sensor 
import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,6 +21,9 @@ from tests.common import MockConfigEntry HEATER_ID = "1cbf783bb11e4a7c8a6843dee3a86927" # Opentherm device_id for migration PLUG_ID = "cd0ddb54ef694e11ac18ed1cbce5dbbd" # VCR device_id for migration +SECONDARY_ID = ( + "1cbf783bb11e4a7c8a6843dee3a86927" # Heater_central device_id for migration +) async def test_load_unload_config_entry( @@ -77,7 +79,7 @@ async def test_gateway_config_entry_not_ready( [ ( { - "domain": SENSOR_DOMAIN, + "domain": Platform.SENSOR, "platform": DOMAIN, "unique_id": f"{HEATER_ID}-outdoor_temperature", "suggested_object_id": f"{HEATER_ID}-outdoor_temperature", @@ -118,7 +120,18 @@ async def test_migrate_unique_id_temperature( [ ( { - "domain": SWITCH_DOMAIN, + "domain": Platform.BINARY_SENSOR, + "platform": DOMAIN, + "unique_id": f"{SECONDARY_ID}-slave_boiler_state", + "suggested_object_id": f"{SECONDARY_ID}-slave_boiler_state", + "disabled_by": None, + }, + f"{SECONDARY_ID}-slave_boiler_state", + f"{SECONDARY_ID}-secondary_boiler_state", + ), + ( + { + "domain": Platform.SWITCH, "platform": DOMAIN, "unique_id": f"{PLUG_ID}-plug", "suggested_object_id": f"{PLUG_ID}-plug", diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 99b73209ad7..499d1a5df14 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -57,6 +57,7 @@ from homeassistant.const import ( STATE_ON, STATE_OPEN, STATE_OPENING, + STATE_UNAVAILABLE, STATE_UNLOCKED, UnitOfEnergy, UnitOfTemperature, @@ -1053,6 +1054,126 @@ async def test_disabling_entity( ) +@pytest.mark.parametrize("namespace", [""]) +async def test_entity_becomes_unavailable_with_export( + hass: 
HomeAssistant, + entity_registry: er.EntityRegistry, + client: ClientSessionGenerator, + sensor_entities: dict[str, er.RegistryEntry], +) -> None: + """Test an entity that becomes unavailable is still exported.""" + data = {**sensor_entities} + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + # Make sensor_1 unavailable. + set_state_with_entry( + hass, data["sensor_1"], STATE_UNAVAILABLE, data["sensor_1_attributes"] + ) + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + # Check that only the availability changed on sensor_1. 
+ assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 15.6' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 2.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 0.0' in body + ) + + # The other sensor should be unchanged. + assert ( + 'sensor_humidity_percent{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 54.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_humidity",' + 'friendly_name="Outside Humidity"} 1.0' in body + ) + + # Bring sensor_1 back and check that it is correct. + set_state_with_entry(hass, data["sensor_1"], 200.0, data["sensor_1_attributes"]) + + await hass.async_block_till_done() + body = await generate_latest_metrics(client) + + assert ( + 'sensor_temperature_celsius{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 200.0' in body + ) + + assert ( + 'state_change_total{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 3.0' in body + ) + + assert ( + 'entity_available{domain="sensor",' + 'entity="sensor.outside_temperature",' + 'friendly_name="Outside Temperature"} 1.0' in body + ) + + @pytest.fixture(name="sensor_entities") async def sensor_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry diff --git a/tests/components/qbittorrent/test_helpers.py b/tests/components/qbittorrent/test_helpers.py new file mode 100644 index 00000000000..b308cd33aec --- /dev/null +++ b/tests/components/qbittorrent/test_helpers.py @@ -0,0 +1,108 @@ +"""Test 
the qBittorrent helpers.""" + +from homeassistant.components.qbittorrent.helpers import ( + format_progress, + format_torrent, + format_torrents, + format_unix_timestamp, + seconds_to_hhmmss, +) +from homeassistant.core import HomeAssistant + + +async def test_seconds_to_hhmmss( + hass: HomeAssistant, +) -> None: + """Test the seconds_to_hhmmss function.""" + assert seconds_to_hhmmss(8640000) == "None" + assert seconds_to_hhmmss(3661) == "01:01:01" + + +async def test_format_unix_timestamp( + hass: HomeAssistant, +) -> None: + """Test the format_unix_timestamp function.""" + assert format_unix_timestamp(1640995200) == "2022-01-01T00:00:00+00:00" + + +async def test_format_progress( + hass: HomeAssistant, +) -> None: + """Test the format_progress function.""" + assert format_progress({"progress": 0.5}) == "50.00" + + +async def test_format_torrents( + hass: HomeAssistant, +) -> None: + """Test the format_torrents function.""" + torrents_data = [ + { + "name": "torrent1", + "hash": "hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + }, + { + "name": "torrent2", + "hash": "hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + }, + ] + + expected_result = { + "torrent1": { + "id": "hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + }, + "torrent2": { + "id": "hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + }, + } + + result = format_torrents(torrents_data) + + assert result == expected_result + + +async def test_format_torrent( + hass: HomeAssistant, +) -> None: + """Test the format_torrent function.""" + torrent_data = { + "hash": "hash1", + "added_on": 1640995200, + "progress": 0.5, + "state": "paused", + "eta": 86400, + "ratio": 1.0, + } + + expected_result = { + "id": 
"hash1", + "added_date": "2022-01-01T00:00:00+00:00", + "percent_done": "50.00", + "status": "paused", + "eta": "24:00:00", + "ratio": "1.00", + } + + result = format_torrent(torrent_data) + + assert result == expected_result diff --git a/tests/components/rainbird/conftest.py b/tests/components/rainbird/conftest.py index 10101986007..59471f5eed4 100644 --- a/tests/components/rainbird/conftest.py +++ b/tests/components/rainbird/conftest.py @@ -187,7 +187,7 @@ def aioclient_mock(hass: HomeAssistant) -> Generator[AiohttpClientMocker, None, def rainbird_json_response(result: dict[str, str]) -> bytes: """Create a fake API response.""" return encryption.encrypt( - '{"jsonrpc": "2.0", "result": %s, "id": 1} ' % json.dumps(result), + f'{{"jsonrpc": "2.0", "result": {json.dumps(result)}, "id": 1}} ', PASSWORD, ) diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 7a57b226d77..e0f43323f25 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -109,7 +109,9 @@ async def async_wait_recording_done(hass: HomeAssistant) -> None: await hass.async_block_till_done() -async def async_wait_purge_done(hass: HomeAssistant, max: int | None = None) -> None: +async def async_wait_purge_done( + hass: HomeAssistant, max_number: int | None = None +) -> None: """Wait for max number of purge events. Because a purge may insert another PurgeTask into @@ -117,9 +119,9 @@ async def async_wait_purge_done(hass: HomeAssistant, max: int | None = None) -> a maximum number of WaitTasks that we will put into the queue. 
""" - if not max: - max = DEFAULT_PURGE_TASKS - for _ in range(max + 1): + if not max_number: + max_number = DEFAULT_PURGE_TASKS + for _ in range(max_number + 1): await async_wait_recording_done(hass) @@ -325,10 +327,10 @@ def convert_pending_states_to_meta(instance: Recorder, session: Session) -> None entity_ids: set[str] = set() states: set[States] = set() states_meta_objects: dict[str, StatesMeta] = {} - for object in session: - if isinstance(object, States): - entity_ids.add(object.entity_id) - states.add(object) + for session_object in session: + if isinstance(session_object, States): + entity_ids.add(session_object.entity_id) + states.add(session_object) entity_id_to_metadata_ids = instance.states_meta_manager.get_many( entity_ids, session, True @@ -352,10 +354,10 @@ def convert_pending_events_to_event_types(instance: Recorder, session: Session) event_types: set[str] = set() events: set[Events] = set() event_types_objects: dict[str, EventTypes] = {} - for object in session: - if isinstance(object, Events): - event_types.add(object.event_type) - events.add(object) + for session_object in session: + if isinstance(session_object, Events): + event_types.add(session_object.event_type) + events.add(session_object) event_type_to_event_type_ids = instance.event_type_manager.get_many( event_types, session, True diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index b2da3f1d62f..e80bc7ca7d1 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -9,6 +9,7 @@ from freezegun import freeze_time import pytest from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session +from voluptuous.error import MultipleInvalid from homeassistant.components import recorder from homeassistant.components.recorder.const import SupportedDialect @@ -1446,20 +1447,20 @@ async def test_purge_entities( _add_purge_records(hass) - # Confirm calling service without 
arguments matches all records (default filter behavior) + # Confirm calling service without arguments is invalid with session_scope(hass=hass) as session: states = session.query(States) assert states.count() == 190 - await _purge_entities(hass, [], [], []) + with pytest.raises(MultipleInvalid): + await _purge_entities(hass, [], [], []) with session_scope(hass=hass, read_only=True) as session: states = session.query(States) - assert states.count() == 0 + assert states.count() == 190 - # The states_meta table should be empty states_meta_remain = session.query(StatesMeta) - assert states_meta_remain.count() == 0 + assert states_meta_remain.count() == 4 async def _add_test_states(hass: HomeAssistant, wait_recording_done: bool = True): diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 549280efba2..9e32fa2c500 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -1040,14 +1040,14 @@ async def test_resolve_period(hass: HomeAssistant) -> None: def test_chunked_or_all(): """Test chunked_or_all can iterate chunk sizes larger than the passed in collection.""" - all = [] + all_items = [] incoming = (1, 2, 3, 4) for chunk in chunked_or_all(incoming, 2): assert len(chunk) == 2 - all.extend(chunk) - assert all == [1, 2, 3, 4] + all_items.extend(chunk) + assert all_items == [1, 2, 3, 4] - all = [] + all_items = [] incoming = (1, 2, 3, 4) for chunk in chunked_or_all(incoming, 5): assert len(chunk) == 4 @@ -1055,5 +1055,5 @@ def test_chunked_or_all(): # collection since we want to avoid copying the collection # if we don't need to assert chunk is incoming - all.extend(chunk) - assert all == [1, 2, 3, 4] + all_items.extend(chunk) + assert all_items == [1, 2, 3, 4] diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index d594218e9d4..4a1410d45a4 100644 --- a/tests/components/recorder/test_websocket_api.py +++ 
b/tests/components/recorder/test_websocket_api.py @@ -641,12 +641,12 @@ async def test_statistic_during_period_hole( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" - id = 1 + stat_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal stat_id + stat_id += 1 + return stat_id now = dt_util.utcnow() diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index d849c658149..19c40f6ec20 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -127,7 +127,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -363,7 +368,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-clock", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "schedule_mode", ATTR_UNIQUE_ID: "vf1aaaaa555777999_charge_mode", }, @@ -599,7 +609,12 @@ MOCK_VEHICLES = { { ATTR_ENTITY_ID: "select.reg_number_charge_mode", ATTR_ICON: "mdi:calendar-remove", - ATTR_OPTIONS: ["always", "always_charging", "schedule_mode"], + ATTR_OPTIONS: [ + "always", + "always_charging", + "schedule_mode", + "scheduled", + ], ATTR_STATE: "always", ATTR_UNIQUE_ID: "vf1aaaaa555777123_charge_mode", }, diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 7e8356ee070..0722cb5cab3 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -82,6 +82,7 @@ 'always', 
'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -121,6 +122,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -175,6 +177,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -214,6 +217,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -268,6 +272,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -307,6 +312,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -401,6 +407,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -440,6 +447,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -494,6 +502,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -533,6 +542,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , @@ -587,6 +597,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'config_entry_id': , @@ -626,6 +637,7 @@ 'always', 'always_charging', 'schedule_mode', + 'scheduled', ]), }), 'context': , diff --git a/tests/components/risco/test_binary_sensor.py b/tests/components/risco/test_binary_sensor.py index ea18c59e236..b6ea723064e 100644 --- a/tests/components/risco/test_binary_sensor.py +++ b/tests/components/risco/test_binary_sensor.py @@ -122,11 +122,11 @@ async def test_local_setup( async def _check_local_state( - hass, zones, property, value, entity_id, zone_id, callback + hass, zones, entity_property, value, entity_id, zone_id, callback ): with patch.object( zones[zone_id], - property, + entity_property, new_callable=PropertyMock(return_value=value), ): await callback(zone_id, zones[zone_id]) @@ -210,19 +210,19 @@ async def test_armed_local_states( ) -async def _check_system_state(hass, system, property, value, callback): +async def 
_check_system_state(hass, system, entity_property, value, callback): with patch.object( system, - property, + entity_property, new_callable=PropertyMock(return_value=value), ): await callback(system) await hass.async_block_till_done() expected_value = STATE_ON if value else STATE_OFF - if property == "ac_trouble": - property = "a_c_trouble" - entity_id = f"binary_sensor.test_site_name_{property}" + if entity_property == "ac_trouble": + entity_property = "a_c_trouble" + entity_id = f"binary_sensor.test_site_name_{entity_property}" assert hass.states.get(entity_id).state == expected_value @@ -275,6 +275,10 @@ async def test_system_states( "clock_trouble", "box_tamper", ] - for property in properties: - await _check_system_state(hass, system_only_local, property, True, callback) - await _check_system_state(hass, system_only_local, property, False, callback) + for entity_property in properties: + await _check_system_state( + hass, system_only_local, entity_property, True, callback + ) + await _check_system_state( + hass, system_only_local, entity_property, False, callback + ) diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 157eb3e62b5..a8236ad3d87 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -133,8 +133,8 @@ async def test_error_on_login( await hass.async_block_till_done() registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert not registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert not registry.async_is_registered(entity_id) def _check_state(hass, category, entity_id): @@ -184,8 +184,8 @@ async def test_cloud_setup( ) -> None: """Test entity setup.""" registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert registry.async_is_registered(entity_id) save_mock.assert_awaited_once_with({LAST_EVENT_TIMESTAMP_KEY: 
TEST_EVENTS[0].time}) for category, entity_id in ENTITY_IDS.items(): @@ -213,5 +213,5 @@ async def test_local_setup( ) -> None: """Test entity setup.""" registry = er.async_get(hass) - for id in ENTITY_IDS.values(): - assert not registry.async_is_registered(id) + for entity_id in ENTITY_IDS.values(): + assert not registry.async_is_registered(entity_id) diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index 23d16f643b2..88ed6e1098c 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -89,6 +89,7 @@ async def test_listener_update( ) ] ) + await hass.async_block_till_done() assert hass.states.get("sensor.roborock_s7_maxv_filter_time_left").state == str( FILTER_REPLACE_TIME - 743 ) diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 404b9a6b3af..1b8cf4c999d 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -9,7 +9,7 @@ 'TV', 'HDMI', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'media_player.any', @@ -51,7 +51,7 @@ 'original_name': None, 'platform': 'samsungtv', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'sample-entry-id', 'unit_of_measurement': None, diff --git a/tests/components/sensibo/conftest.py b/tests/components/sensibo/conftest.py index d98b19c3833..1c835cd8001 100644 --- a/tests/components/sensibo/conftest.py +++ b/tests/components/sensibo/conftest.py @@ -74,7 +74,7 @@ def load_json_from_fixture(load_data: str) -> SensiboData: return json_data -@pytest.fixture(name="load_data", scope="session") +@pytest.fixture(name="load_data", scope="package") def load_data_from_fixture() -> str: """Load fixture with fixture data and return.""" return load_fixture("data.json", "sensibo") diff --git 
a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 9e8e401ea46..079984476b0 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -1146,6 +1146,14 @@ async def test_unit_conversion_priority_precision( suggested_display_precision=suggested_precision, suggested_unit_of_measurement=suggested_unit, ) + entity4 = MockSensor( + name="Test", + device_class=device_class, + native_unit_of_measurement=native_unit, + native_value=str(native_value), + suggested_display_precision=None, + unique_id="very_unique_4", + ) setup_test_component_platform( hass, sensor.DOMAIN, @@ -1154,6 +1162,7 @@ async def test_unit_conversion_priority_precision( entity1, entity2, entity3, + entity4, ], ) @@ -1230,6 +1239,21 @@ async def test_unit_conversion_priority_precision( round(custom_state, 4) ) + # Set a display_precision without having suggested_display_precision + entity_registry.async_update_entity_options( + entity4.entity_id, + "sensor", + {"display_precision": 4}, + ) + entry4 = entity_registry.async_get(entity4.entity_id) + assert "suggested_display_precision" not in entry4.options["sensor"] + assert entry4.options["sensor"]["display_precision"] == 4 + await hass.async_block_till_done() + state = hass.states.get(entity4.entity_id) + assert float(async_rounded_state(hass, entity4.entity_id, state)) == pytest.approx( + round(automatic_state, 4) + ) + @pytest.mark.parametrize( ( @@ -1594,6 +1618,41 @@ async def test_suggested_precision_option_update( } +async def test_suggested_precision_option_removal( + hass: HomeAssistant, +) -> None: + """Test suggested precision stored in the registry is removed.""" + + entity_registry = er.async_get(hass) + + # Pre-register entities + entry = entity_registry.async_get_or_create("sensor", "test", "very_unique") + entity_registry.async_update_entity_options( + entry.entity_id, + "sensor", + { + "suggested_display_precision": 1, + }, + ) + + entity0 = MockSensor( + name="Test", 
+ device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.HOURS, + native_value="1.5", + suggested_display_precision=None, + unique_id="very_unique", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) + await hass.async_block_till_done() + + # Assert the suggested precision is no longer stored in the registry + entry = entity_registry.async_get(entity0.entity_id) + assert entry.options.get("sensor", {}).get("suggested_display_precision") is None + + @pytest.mark.parametrize( ( "unit_system", diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 8084fe69e89..a7aaf938410 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -560,7 +560,7 @@ def test_compile_hourly_statistics_purged_state_changes( ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - mean = min = max = float(hist["sensor.test1"][-1].state) + mean = min_value = max_value = float(hist["sensor.test1"][-1].state) # Purge all states from the database with freeze_time(four): @@ -594,8 +594,8 @@ def test_compile_hourly_statistics_purged_state_changes( "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), - "min": pytest.approx(min), - "max": pytest.approx(max), + "min": pytest.approx(min_value), + "max": pytest.approx(max_value), "last_reset": None, "state": None, "sum": None, @@ -4113,12 +4113,12 @@ async def test_validate_unit_change_convertible( The test also asserts that the sensor's device class is ignored. 
""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4228,12 +4228,12 @@ async def test_validate_statistics_unit_ignore_device_class( The test asserts that the sensor's device class is ignored. """ - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4321,14 +4321,14 @@ async def test_validate_statistics_unit_change_no_device_class( conversion, and the unit is then changed to a unit which can and cannot be converted to the original unit. """ - id = 1 + msg_id = 1 attributes = dict(attributes) attributes.pop("device_class") def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4436,12 +4436,12 @@ async def test_validate_statistics_unsupported_state_class( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4505,12 +4505,12 @@ async def test_validate_statistics_sensor_no_longer_recorded( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4573,12 +4573,12 @@ async def test_validate_statistics_sensor_not_recorded( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def 
assert_validation_result(client, expected_result): await client.send_json( @@ -4638,12 +4638,12 @@ async def test_validate_statistics_sensor_removed( unit, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4702,12 +4702,12 @@ async def test_validate_statistics_unit_change_no_conversion( unit2, ) -> None: """Test validate_statistics.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4837,12 +4837,12 @@ async def test_validate_statistics_unit_change_equivalent_units( This tests no validation issue is created when a sensor's unit changes to an equivalent unit. """ - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -4923,12 +4923,12 @@ async def test_validate_statistics_unit_change_equivalent_units_2( equivalent unit which is not known to the unit converters. 
""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( @@ -5005,12 +5005,12 @@ async def test_validate_statistics_other_domain( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test sensor does not raise issues for statistics for other domains.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_validation_result(client, expected_result): await client.send_json( diff --git a/tests/components/seventeentrack/__init__.py b/tests/components/seventeentrack/__init__.py index 4101f34496e..b3452b38f96 100644 --- a/tests/components/seventeentrack/__init__.py +++ b/tests/components/seventeentrack/__init__.py @@ -4,7 +4,7 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.seventeentrack.sensor import DEFAULT_SCAN_INTERVAL +from homeassistant.components.seventeentrack.const import DEFAULT_SCAN_INTERVAL from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, async_fire_time_changed diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index 2865b3f2599..2e266a9b13c 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -7,12 +7,10 @@ from py17track.package import Package import pytest from homeassistant.components.seventeentrack.const import ( - DEFAULT_SHOW_ARCHIVED, - DEFAULT_SHOW_DELIVERED, -) -from homeassistant.components.seventeentrack.sensor import ( CONF_SHOW_ARCHIVED, CONF_SHOW_DELIVERED, + DEFAULT_SHOW_ARCHIVED, + DEFAULT_SHOW_DELIVERED, ) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -28,6 +26,8 @@ DEFAULT_SUMMARY = { "Returned": 0, } +DEFAULT_SUMMARY_LENGTH = 
len(DEFAULT_SUMMARY) + ACCOUNT_ID = "1234" NEW_SUMMARY_DATA = { diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr new file mode 100644 index 00000000000..185a1d44fe0 --- /dev/null +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -0,0 +1,53 @@ +# serializer version: 1 +# name: test_get_all_packages + dict({ + 'packages': list([ + dict({ + 'friendly_name': 'friendly name 3', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Expired', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '123', + }), + dict({ + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'In Transit', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '456', + }), + dict({ + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Delivered', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '789', + }), + ]), + }) +# --- +# name: test_get_packages_from_list + dict({ + 'packages': list([ + dict({ + 'friendly_name': 'friendly name 1', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'In Transit', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '456', + }), + dict({ + 'friendly_name': 'friendly name 2', + 'info_text': 'info text 1', + 'location': 'location 1', + 'status': 'Delivered', + 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_number': '789', + }), + ]), + }) +# --- diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index aa7f61ad318..31fc5deec24 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -14,6 +14,7 @@ from homeassistant.setup import 
async_setup_component from . import goto_future, init_integration from .conftest import ( DEFAULT_SUMMARY, + DEFAULT_SUMMARY_LENGTH, NEW_SUMMARY_DATA, VALID_PLATFORM_CONFIG_FULL, get_package, @@ -72,11 +73,10 @@ async def test_add_package( """Ensure package is added correctly when user add a new package.""" package = get_package() mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package2 = get_package( tracking_number="789", @@ -89,8 +89,8 @@ async def test_add_package( await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 2 + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 async def test_add_package_default_friendly_name( @@ -101,13 +101,12 @@ async def test_add_package_default_friendly_name( """Ensure package is added correctly with default friendly name when user add a new package without his own friendly name.""" package = get_package(friendly_name=None) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - state_456 = hass.states.get("sensor.seventeentrack_package_456") + state_456 = hass.states.get("sensor.17track_package_456") assert state_456 is not None - assert state_456.attributes["friendly_name"] == "Seventeentrack Package: 456" - assert len(hass.states.async_entity_ids()) == 1 + assert 
state_456.attributes["friendly_name"] == "17Track Package 456" + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 async def test_remove_package( @@ -130,26 +129,20 @@ async def test_remove_package( package1, package2, ] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 2 + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert hass.states.get("sensor.17track_package_friendly_name_2") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 2 mock_seventeentrack.return_value.profile.packages.return_value = [package2] await goto_future(hass, freezer) - assert hass.states.get("sensor.seventeentrack_package_456").state == "unavailable" - assert len(hass.states.async_entity_ids()) == 2 - - await goto_future(hass, freezer) - - assert hass.states.get("sensor.seventeentrack_package_456") is None - assert hass.states.get("sensor.seventeentrack_package_789") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") is None + assert hass.states.get("sensor.17track_package_friendly_name_2") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 async def test_package_error( @@ -164,36 +157,7 @@ async def test_package_error( mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is None - - -async def test_friendly_name_changed( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test friendly name change.""" - package = 
get_package() - mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} - - await init_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 - - package = get_package(friendly_name="friendly name 2") - mock_seventeentrack.return_value.profile.packages.return_value = [package] - - await goto_future(hass, freezer) - - assert hass.states.get("sensor.seventeentrack_package_456") is not None - entity = hass.data["entity_components"]["sensor"].get_entity( - "sensor.seventeentrack_package_456" - ) - assert entity.name == "Seventeentrack Package: friendly name 2" - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") is None async def test_delivered_not_shown( @@ -205,7 +169,6 @@ async def test_delivered_not_shown( """Ensure delivered packages are not shown.""" package = get_package(status=40) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} with patch( "homeassistant.components.seventeentrack.sensor.persistent_notification" @@ -213,7 +176,7 @@ async def test_delivered_not_shown( await init_integration(hass, mock_config_entry_with_default_options) await goto_future(hass, freezer) - assert not hass.states.async_entity_ids() + assert hass.states.get("sensor.17track_package_friendly_name_1") is None persistent_notification_mock.create.assert_called() @@ -225,15 +188,14 @@ async def test_delivered_shown( """Ensure delivered packages are show when user choose to show them.""" package = get_package(status=40) mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} with patch( 
"homeassistant.components.seventeentrack.sensor.persistent_notification" ) as persistent_notification_mock: await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 persistent_notification_mock.create.assert_not_called() @@ -246,12 +208,11 @@ async def test_becomes_delivered_not_shown_notification( """Ensure notification is triggered when package becomes delivered.""" package = get_package() mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry_with_default_options) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 package_delivered = get_package(status=40) mock_seventeentrack.return_value.profile.packages.return_value = [package_delivered] @@ -260,10 +221,9 @@ async def test_becomes_delivered_not_shown_notification( "homeassistant.components.seventeentrack.sensor.persistent_notification" ) as persistent_notification_mock: await goto_future(hass, freezer) - await goto_future(hass, freezer) persistent_notification_mock.create.assert_called() - assert not hass.states.async_entity_ids() + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH async def test_summary_correctly_updated( @@ -275,33 +235,27 @@ async def test_summary_correctly_updated( """Ensure summary entities are not duplicated.""" package = get_package(status=30) mock_seventeentrack.return_value.profile.packages.return_value = [package] - 
mock_seventeentrack.return_value.profile.summary.return_value = DEFAULT_SUMMARY await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 8 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 - state_ready_picked = hass.states.get( - "sensor.seventeentrack_packages_ready_to_be_picked_up" - ) + state_ready_picked = hass.states.get("sensor.17track_ready_to_be_picked_up") assert state_ready_picked is not None assert len(state_ready_picked.attributes["packages"]) == 1 mock_seventeentrack.return_value.profile.packages.return_value = [] mock_seventeentrack.return_value.profile.summary.return_value = NEW_SUMMARY_DATA - await goto_future(hass, freezer) await goto_future(hass, freezer) - assert len(hass.states.async_entity_ids()) == 7 + assert len(hass.states.async_entity_ids()) == len(NEW_SUMMARY_DATA) for state in hass.states.async_all(): assert state.state == "1" - state_ready_picked = hass.states.get( - "sensor.seventeentrack_packages_ready_to_be_picked_up" - ) + state_ready_picked = hass.states.get("sensor.17track_ready_to_be_picked_up") assert state_ready_picked is not None - assert state_ready_picked.attributes["packages"] is None + assert len(state_ready_picked.attributes["packages"]) == 0 async def test_summary_error( @@ -318,7 +272,7 @@ async def test_summary_error( await init_integration(hass, mock_config_entry) - assert len(hass.states.async_entity_ids()) == 1 + assert len(hass.states.async_entity_ids()) == 0 assert ( hass.states.get("sensor.seventeentrack_packages_ready_to_be_picked_up") is None @@ -334,13 +288,12 @@ async def test_utc_timestamp( package = get_package(tz="Asia/Jakarta") mock_seventeentrack.return_value.profile.packages.return_value = [package] - mock_seventeentrack.return_value.profile.summary.return_value = {} await init_integration(hass, mock_config_entry) - assert hass.states.get("sensor.seventeentrack_package_456") is not None - assert len(hass.states.async_entity_ids()) == 1 - 
state_456 = hass.states.get("sensor.seventeentrack_package_456") + assert hass.states.get("sensor.17track_package_friendly_name_1") is not None + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 + state_456 = hass.states.get("sensor.17track_package_friendly_name_1") assert state_456 is not None assert str(state_456.attributes.get("timestamp")) == "2020-08-10 03:32:00+00:00" diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py new file mode 100644 index 00000000000..cbd7132bf67 --- /dev/null +++ b/tests/components/seventeentrack/test_services.py @@ -0,0 +1,76 @@ +"""Tests for the seventeentrack service.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.seventeentrack import DOMAIN, SERVICE_GET_PACKAGES +from homeassistant.core import HomeAssistant, SupportsResponse + +from tests.common import MockConfigEntry +from tests.components.seventeentrack import init_integration +from tests.components.seventeentrack.conftest import get_package + + +async def test_get_packages_from_list( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service returns only the packages in the list.""" + await _mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + "config_entry_id": mock_config_entry.entry_id, + "package_state": ["in_transit", "delivered"], + }, + blocking=True, + return_response=SupportsResponse.ONLY, + ) + + assert service_response == snapshot + + +async def test_get_all_packages( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service returns all packages when non provided.""" + await 
_mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + service_response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PACKAGES, + { + "config_entry_id": mock_config_entry.entry_id, + }, + blocking=True, + return_response=SupportsResponse.ONLY, + ) + + assert service_response == snapshot + + +async def _mock_packages(mock_seventeentrack): + package1 = get_package(status=10) + package2 = get_package( + tracking_number="789", + friendly_name="friendly name 2", + status=40, + ) + package3 = get_package( + tracking_number="123", + friendly_name="friendly name 3", + status=20, + ) + mock_seventeentrack.return_value.profile.packages.return_value = [ + package1, + package2, + package3, + ] diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index 9fee3468f11..9946dd7640d 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -25,7 +25,12 @@ from homeassistant.components.climate import ( from homeassistant.components.shelly.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_UNAVAILABLE +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import DeviceRegistry @@ -711,3 +716,36 @@ async def test_wall_display_thermostat_mode( entry = entity_registry.async_get(climate_entity_id) assert entry assert entry.unique_id == "123456789ABC-thermostat:0" + + +async def test_wall_display_thermostat_mode_external_actuator( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: 
+ """Test Wall Display in thermostat mode with an external actuator.""" + climate_entity_id = "climate.test_name" + switch_entity_id = "switch.test_switch_0" + + new_status = deepcopy(mock_rpc_device.status) + new_status["sys"]["relay_in_thermostat"] = False + monkeypatch.setattr(mock_rpc_device, "status", new_status) + + await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + # the switch entity should be created + state = hass.states.get(switch_entity_id) + assert state + assert state.state == STATE_ON + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + + # the climate entity should be created + state = hass.states.get(climate_entity_id) + assert state + assert state.state == HVACMode.HEAT + assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 + + entry = entity_registry.async_get(climate_entity_id) + assert entry + assert entry.unique_id == "123456789ABC-thermostat:0" diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index c4db8acaf6d..39238f1674a 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -96,11 +96,11 @@ async def test_get_triggers_rpc_device( CONF_PLATFORM: "device", CONF_DEVICE_ID: device.id, CONF_DOMAIN: DOMAIN, - CONF_TYPE: type, + CONF_TYPE: trigger_type, CONF_SUBTYPE: "button1", "metadata": {}, } - for type in [ + for trigger_type in [ "btn_down", "btn_up", "single_push", @@ -130,11 +130,11 @@ async def test_get_triggers_button( CONF_PLATFORM: "device", CONF_DEVICE_ID: device.id, CONF_DOMAIN: DOMAIN, - CONF_TYPE: type, + CONF_TYPE: trigger_type, CONF_SUBTYPE: "button", "metadata": {}, } - for type in ["single", "double", "triple", "long"] + for trigger_type in ["single", "double", "triple", "long"] ] triggers = await async_get_device_automations( diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index fe2c4354afc..dd214c8841d 100644 --- 
a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -330,6 +330,7 @@ async def test_wall_display_relay_mode( new_status = deepcopy(mock_rpc_device.status) new_status["sys"]["relay_in_thermostat"] = False + new_status.pop("thermostat:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index b6d34b9d98a..d25cc8849e5 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -342,7 +342,7 @@ def event_request_factory_fixture(event_factory): if events is None: events = [] if device_ids: - events.extend([event_factory(id) for id in device_ids]) + events.extend([event_factory(device_id) for device_id in device_ids]) events.append(event_factory(uuid4())) events.append(event_factory(device_ids[0], event_type="OTHER")) request.events = events diff --git a/tests/components/smhi/conftest.py b/tests/components/smhi/conftest.py index df6a81a223d..62da5207565 100644 --- a/tests/components/smhi/conftest.py +++ b/tests/components/smhi/conftest.py @@ -7,13 +7,13 @@ from homeassistant.components.smhi.const import DOMAIN from tests.common import load_fixture -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def api_response(): """Return an API response.""" return load_fixture("smhi.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def api_response_lack_data(): """Return an API response.""" return load_fixture("smhi_short.json", DOMAIN) diff --git a/tests/components/solaredge/test_config_flow.py b/tests/components/solaredge/test_config_flow.py index 9ff605a871d..759a4d6b421 100644 --- a/tests/components/solaredge/test_config_flow.py +++ b/tests/components/solaredge/test_config_flow.py @@ -1,9 +1,9 @@ """Tests for the SolarEdge config flow.""" -from unittest.mock import Mock, patch +from 
unittest.mock import AsyncMock, Mock, patch +from aiohttp import ClientError import pytest -from requests.exceptions import ConnectTimeout, HTTPError from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME, DOMAIN from homeassistant.config_entries import SOURCE_IGNORE, SOURCE_USER @@ -22,8 +22,11 @@ API_KEY = "a1b2c3d4e5f6g7h8" def mock_controller(): """Mock a successful Solaredge API.""" api = Mock() - api.get_details.return_value = {"details": {"status": "active"}} - with patch("solaredge.Solaredge", return_value=api): + api.get_details = AsyncMock(return_value={"details": {"status": "active"}}) + with patch( + "homeassistant.components.solaredge.config_flow.aiosolaredge.SolarEdge", + return_value=api, + ): yield api @@ -117,7 +120,7 @@ async def test_asserts(hass: HomeAssistant, test_api: Mock) -> None: assert result.get("errors") == {CONF_SITE_ID: "invalid_api_key"} # test with ConnectionTimeout - test_api.get_details.side_effect = ConnectTimeout() + test_api.get_details = AsyncMock(side_effect=TimeoutError()) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -127,7 +130,7 @@ async def test_asserts(hass: HomeAssistant, test_api: Mock) -> None: assert result.get("errors") == {CONF_SITE_ID: "could_not_connect"} # test with HTTPError - test_api.get_details.side_effect = HTTPError() + test_api.get_details = AsyncMock(side_effect=ClientError()) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, diff --git a/tests/components/solaredge/test_coordinator.py b/tests/components/solaredge/test_coordinator.py index b1496d18d93..7a6b3af1cde 100644 --- a/tests/components/solaredge/test_coordinator.py +++ b/tests/components/solaredge/test_coordinator.py @@ -1,6 +1,6 @@ """Tests for the SolarEdge coordinator services.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest @@ -25,7 
+25,7 @@ def enable_all_entities(entity_registry_enabled_by_default): """Make sure all entities are enabled.""" -@patch("homeassistant.components.solaredge.Solaredge") +@patch("homeassistant.components.solaredge.SolarEdge") async def test_solaredgeoverviewdataservice_energy_values_validity( mock_solaredge, hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: @@ -35,7 +35,9 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( title=DEFAULT_NAME, data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}, ) - mock_solaredge().get_details.return_value = {"details": {"status": "active"}} + mock_solaredge().get_details = AsyncMock( + return_value={"details": {"status": "active"}} + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -50,7 +52,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( "currentPower": {"power": 0.0}, } } - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -60,7 +62,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # Invalid energy values, lifeTimeData energy is lower than last year, month or day. 
mock_overview_data["overview"]["lifeTimeData"]["energy"] = 0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -71,7 +73,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # New valid energy values update mock_overview_data["overview"]["lifeTimeData"]["energy"] = 100001 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -82,7 +84,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( # Invalid energy values, lastYearData energy is lower than last month or day. mock_overview_data["overview"]["lastYearData"]["energy"] = 0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -100,7 +102,7 @@ async def test_solaredgeoverviewdataservice_energy_values_validity( mock_overview_data["overview"]["lastYearData"]["energy"] = 0.0 mock_overview_data["overview"]["lastMonthData"]["energy"] = 0.0 mock_overview_data["overview"]["lastDayData"]["energy"] = 0.0 - mock_solaredge().get_overview.return_value = mock_overview_data + mock_solaredge().get_overview = AsyncMock(return_value=mock_overview_data) freezer.tick(OVERVIEW_UPDATE_DELAY) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 218ca90a26b..3da0dd5c983 100644 --- a/tests/components/sonos/conftest.py +++ 
b/tests/components/sonos/conftest.py @@ -203,6 +203,7 @@ class SoCoMockFactory: my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid mock_soco.get_speaker_info = Mock(return_value=my_speaker_info) + mock_soco.add_to_queue = Mock(return_value=10) mock_soco.avTransport = SonosMockService("AVTransport", ip_address) mock_soco.renderingControl = SonosMockService("RenderingControl", ip_address) @@ -303,11 +304,116 @@ def config_fixture(): return {DOMAIN: {MP_DOMAIN: {CONF_HOSTS: ["192.168.42.2"]}}} +class MockMusicServiceItem: + """Mocks a Soco MusicServiceItem.""" + + def __init__(self, title: str, item_id: str, parent_id: str, item_class: str): + """Initialize the mock item.""" + self.title = title + self.item_id = item_id + self.item_class = item_class + self.parent_id = parent_id + + +def mock_browse_by_idstring( + search_type: str, idstring: str, start=0, max_items=100, full_album_art_uri=False +) -> list[MockMusicServiceItem]: + """Mock the call to browse_by_id_string.""" + if search_type == "album_artists" and idstring == "A:ALBUMARTIST/Beatles": + return [ + MockMusicServiceItem( + "All", + idstring + "/", + idstring, + "object.container.playlistContainer.sameArtist", + ), + MockMusicServiceItem( + "A Hard Day's Night", + "A:ALBUMARTIST/Beatles/A%20Hard%20Day's%20Night", + idstring, + "object.container.album.musicAlbum", + ), + MockMusicServiceItem( + "Abbey Road", + "A:ALBUMARTIST/Beatles/Abbey%20Road", + idstring, + "object.container.album.musicAlbum", + ), + ] + # browse_by_id_string works with URL encoded or decoded strings + if search_type == "genres" and idstring in ( + "A:GENRE/Classic%20Rock", + "A:GENRE/Classic Rock", + ): + return [ + MockMusicServiceItem( + "All", + "A:GENRE/Classic%20Rock/", + "A:GENRE/Classic%20Rock", + "object.container.albumlist", + ), + MockMusicServiceItem( + "Bruce Springsteen", + "A:GENRE/Classic%20Rock/Bruce%20Springsteen", + "A:GENRE/Classic%20Rock", + "object.container.person.musicArtist", + ), + 
MockMusicServiceItem( + "Cream", + "A:GENRE/Classic%20Rock/Cream", + "A:GENRE/Classic%20Rock", + "object.container.person.musicArtist", + ), + ] + if search_type == "composers" and idstring in ( + "A:COMPOSER/Carlos%20Santana", + "A:COMPOSER/Carlos Santana", + ): + return [ + MockMusicServiceItem( + "All", + "A:COMPOSER/Carlos%20Santana/", + "A:COMPOSER/Carlos%20Santana", + "object.container.playlistContainer.sameArtist", + ), + MockMusicServiceItem( + "Between Good And Evil", + "A:COMPOSER/Carlos%20Santana/Between%20Good%20And%20Evil", + "A:COMPOSER/Carlos%20Santana", + "object.container.album.musicAlbum", + ), + MockMusicServiceItem( + "Sacred Fire", + "A:COMPOSER/Carlos%20Santana/Sacred%20Fire", + "A:COMPOSER/Carlos%20Santana", + "object.container.album.musicAlbum", + ), + ] + return [] + + +def mock_get_music_library_information( + search_type: str, search_term: str, full_album_art_uri: bool = True +) -> list[MockMusicServiceItem]: + """Mock the call to get music library information.""" + if search_type == "albums" and search_term == "Abbey Road": + return [ + MockMusicServiceItem( + "Abbey Road", + "A:ALBUM/Abbey%20Road", + "A:ALBUM", + "object.container.album.musicAlbum", + ) + ] + + @pytest.fixture(name="music_library") def music_library_fixture(): """Create music_library fixture.""" music_library = MagicMock() music_library.get_sonos_favorites.return_value.update_id = 1 + music_library.browse_by_idstring = mock_browse_by_idstring + music_library.get_music_library_information = mock_get_music_library_information return music_library @@ -481,7 +587,7 @@ def mock_get_source_ip(mock_get_source_ip): return mock_get_source_ip -@pytest.fixture(name="zgs_discovery", scope="session") +@pytest.fixture(name="zgs_discovery", scope="package") def zgs_discovery_fixture(): """Load ZoneGroupState discovery payload and return it.""" return load_fixture("sonos/zgs_discovery.xml") diff --git a/tests/components/sonos/test_media_player.py 
b/tests/components/sonos/test_media_player.py index c181520b85d..976d3480429 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -7,7 +7,10 @@ import pytest from homeassistant.components.media_player import ( DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, + MediaPlayerEnqueue, ) +from homeassistant.components.media_player.const import ATTR_MEDIA_ENQUEUE +from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import ( @@ -16,7 +19,7 @@ from homeassistant.helpers.device_registry import ( DeviceRegistry, ) -from .conftest import SoCoMockFactory +from .conftest import MockMusicServiceItem, SoCoMockFactory async def test_device_registry( @@ -65,35 +68,134 @@ async def test_entity_basic( assert attributes["volume_level"] == 0.19 -class _MockMusicServiceItem: - """Mocks a Soco MusicServiceItem.""" - - def __init__( - self, - title: str, - item_id: str, - parent_id: str, - item_class: str, - ) -> None: - """Initialize the mock item.""" - self.title = title - self.item_id = item_id - self.item_class = item_class - self.parent_id = parent_id - - def get_uri(self) -> str: - """Return URI.""" - return self.item_id.replace("S://", "x-file-cifs://") +@pytest.mark.parametrize( + ("media_content_type", "media_content_id", "enqueue", "test_result"), + [ + ( + "artist", + "A:ALBUMARTIST/Beatles", + MediaPlayerEnqueue.REPLACE, + { + "title": "All", + "item_id": "A:ALBUMARTIST/Beatles/", + "clear_queue": 1, + "position": None, + "play": 1, + "play_pos": 0, + }, + ), + ( + "genre", + "A:GENRE/Classic%20Rock", + MediaPlayerEnqueue.ADD, + { + "title": "All", + "item_id": "A:GENRE/Classic%20Rock/", + "clear_queue": 0, + "position": None, + "play": 0, + "play_pos": 0, + }, + ), + ( + "album", + "A:ALBUM/Abbey%20Road", + MediaPlayerEnqueue.NEXT, + { + "title": "Abbey Road", + "item_id": 
"A:ALBUM/Abbey%20Road", + "clear_queue": 0, + "position": 1, + "play": 0, + "play_pos": 0, + }, + ), + ( + "composer", + "A:COMPOSER/Carlos%20Santana", + MediaPlayerEnqueue.PLAY, + { + "title": "All", + "item_id": "A:COMPOSER/Carlos%20Santana/", + "clear_queue": 0, + "position": 1, + "play": 1, + "play_pos": 9, + }, + ), + ( + "artist", + "A:ALBUMARTIST/Beatles/Abbey%20Road", + MediaPlayerEnqueue.REPLACE, + { + "title": "Abbey Road", + "item_id": "A:ALBUMARTIST/Beatles/Abbey%20Road", + "clear_queue": 1, + "position": None, + "play": 1, + "play_pos": 0, + }, + ), + ], +) +async def test_play_media_library( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, + async_autosetup_sonos, + media_content_type, + media_content_id, + enqueue, + test_result, +) -> None: + """Test playing local library with a variety of options.""" + sock_mock = soco_factory.mock_list.get("192.168.42.2") + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + "entity_id": "media_player.zone_a", + "media_content_type": media_content_type, + "media_content_id": media_content_id, + ATTR_MEDIA_ENQUEUE: enqueue, + }, + blocking=True, + ) + assert sock_mock.clear_queue.call_count == test_result["clear_queue"] + assert sock_mock.add_to_queue.call_count == 1 + assert ( + sock_mock.add_to_queue.call_args_list[0].args[0].title == test_result["title"] + ) + assert ( + sock_mock.add_to_queue.call_args_list[0].args[0].item_id + == test_result["item_id"] + ) + if test_result["position"] is not None: + assert ( + sock_mock.add_to_queue.call_args_list[0].kwargs["position"] + == test_result["position"] + ) + else: + assert "position" not in sock_mock.add_to_queue.call_args_list[0].kwargs + assert ( + sock_mock.add_to_queue.call_args_list[0].kwargs["timeout"] + == LONG_SERVICE_TIMEOUT + ) + assert sock_mock.play_from_queue.call_count == test_result["play"] + if test_result["play"] != 0: + assert ( + sock_mock.play_from_queue.call_args_list[0].args[0] + == test_result["play_pos"] + ) 
_mock_playlists = [ - _MockMusicServiceItem( + MockMusicServiceItem( "playlist1", "S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_1", "A:PLAYLISTS", "object.container.playlistContainer", ), - _MockMusicServiceItem( + MockMusicServiceItem( "playlist2", "S://192.168.1.68/music/iTunes/iTunes%20Music%20Library.xml#GUID_2", "A:PLAYLISTS", diff --git a/tests/components/soundtouch/conftest.py b/tests/components/soundtouch/conftest.py index c81d76072d7..5bfeeea5ec5 100644 --- a/tests/components/soundtouch/conftest.py +++ b/tests/components/soundtouch/conftest.py @@ -47,97 +47,97 @@ def device2_config() -> MockConfigEntry: ) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_info() -> str: """Load SoundTouch device 1 info response and return it.""" return load_fixture("soundtouch/device1_info.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_aux() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_aux.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_bluetooth() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_bluetooth.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_radio() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_radio.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_standby() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_standby.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_upnp() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return 
load_fixture("soundtouch/device1_now_playing_upnp.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_now_playing_upnp_paused() -> str: """Load SoundTouch device 1 now_playing response and return it.""" return load_fixture("soundtouch/device1_now_playing_upnp_paused.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_presets() -> str: """Load SoundTouch device 1 presets response and return it.""" return load_fixture("soundtouch/device1_presets.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_volume() -> str: """Load SoundTouch device 1 volume response and return it.""" return load_fixture("soundtouch/device1_volume.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_volume_muted() -> str: """Load SoundTouch device 1 volume response and return it.""" return load_fixture("soundtouch/device1_volume_muted.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device1_zone_master() -> str: """Load SoundTouch device 1 getZone response and return it.""" return load_fixture("soundtouch/device1_getZone_master.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_info() -> str: """Load SoundTouch device 2 info response and return it.""" return load_fixture("soundtouch/device2_info.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_volume() -> str: """Load SoundTouch device 2 volume response and return it.""" return load_fixture("soundtouch/device2_volume.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_now_playing_standby() -> str: """Load SoundTouch device 2 now_playing response and return it.""" return load_fixture("soundtouch/device2_now_playing_standby.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def device2_zone_slave() -> str: """Load SoundTouch device 2 getZone response and 
return it.""" return load_fixture("soundtouch/device2_getZone_slave.xml") -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def zone_empty() -> str: """Load empty SoundTouch getZone response and return it.""" return load_fixture("soundtouch/getZone_empty.xml") diff --git a/tests/components/synology_dsm/snapshots/test_config_flow.ambr b/tests/components/synology_dsm/snapshots/test_config_flow.ambr new file mode 100644 index 00000000000..807ec764e52 --- /dev/null +++ b/tests/components/synology_dsm/snapshots/test_config_flow.ambr @@ -0,0 +1,86 @@ +# serializer version: 1 +# name: test_discovered_via_zeroconf + dict({ + 'host': '192.168.1.5', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_form_ssdp + dict({ + 'host': '192.168.1.5', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 1234, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user.1 + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5000, + 'ssl': False, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user_2sa + dict({ + 'device_token': 'Dév!cè_T0k€ñ', + 'host': 'nas.meontheinternet.com', + 'mac': list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 5001, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- +# name: test_user_vdsm + dict({ + 'host': 'nas.meontheinternet.com', + 'mac': 
list([ + '00-11-32-XX-XX-59', + '00-11-32-XX-XX-5A', + ]), + 'password': 'password', + 'port': 1234, + 'ssl': True, + 'username': 'Home_Assistant', + 'verify_ssl': False, + }) +# --- diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 483e22f2359..85814f84aad 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -11,19 +11,15 @@ from synology_dsm.exceptions import ( SynologyDSMLoginInvalidException, SynologyDSMRequestException, ) +from syrupy import SnapshotAssertion from homeassistant.components import ssdp, zeroconf from homeassistant.components.synology_dsm.config_flow import CONF_OTP_CODE from homeassistant.components.synology_dsm.const import ( CONF_SNAPSHOT_QUALITY, - CONF_VOLUMES, - DEFAULT_PORT, - DEFAULT_PORT_SSL, DEFAULT_SCAN_INTERVAL, DEFAULT_SNAPSHOT_QUALITY, DEFAULT_TIMEOUT, - DEFAULT_USE_SSL, - DEFAULT_VERIFY_SSL, DOMAIN, ) from homeassistant.config_entries import ( @@ -33,7 +29,6 @@ from homeassistant.config_entries import ( SOURCE_ZEROCONF, ) from homeassistant.const import ( - CONF_DISKS, CONF_HOST, CONF_MAC, CONF_PASSWORD, @@ -149,7 +144,11 @@ def mock_controller_service_failed(): @pytest.mark.usefixtures("mock_setup_entry") -async def test_user(hass: HomeAssistant, service: MagicMock) -> None: +async def test_user( + hass: HomeAssistant, + service: MagicMock, + snapshot: SnapshotAssertion, +) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=None @@ -177,16 +176,7 @@ async def test_user(hass: HomeAssistant, service: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["data"][CONF_SSL] == USE_SSL - assert result["data"][CONF_VERIFY_SSL] == 
VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot service.information.serial = SERIAL_2 with patch( @@ -208,20 +198,13 @@ async def test_user(hass: HomeAssistant, service: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL_2 assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT - assert not result["data"][CONF_SSL] - assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") -async def test_user_2sa(hass: HomeAssistant, service_2sa: MagicMock) -> None: +async def test_user_2sa( + hass: HomeAssistant, service_2sa: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test user with 2sa authentication config.""" with patch( "homeassistant.components.synology_dsm.config_flow.SynologyDSM", @@ -261,20 +244,13 @@ async def test_user_2sa(hass: HomeAssistant, service_2sa: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == DEFAULT_PORT_SSL - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert 
result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") == DEVICE_TOKEN - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") -async def test_user_vdsm(hass: HomeAssistant, service_vdsm: MagicMock) -> None: +async def test_user_vdsm( + hass: HomeAssistant, service_vdsm: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test user config.""" with patch( "homeassistant.components.synology_dsm.config_flow.SynologyDSM", @@ -306,16 +282,7 @@ async def test_user_vdsm(hass: HomeAssistant, service_vdsm: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == HOST - assert result["data"][CONF_HOST] == HOST - assert result["data"][CONF_PORT] == PORT - assert result["data"][CONF_SSL] == USE_SSL - assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") @@ -467,7 +434,9 @@ async def test_missing_data_after_login( @pytest.mark.usefixtures("mock_setup_entry") -async def test_form_ssdp(hass: HomeAssistant, service: MagicMock) -> None: +async def test_form_ssdp( + hass: HomeAssistant, service: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test we can setup from ssdp.""" result = await hass.config_entries.flow.async_init( @@ -498,16 +467,7 @@ async def test_form_ssdp(hass: HomeAssistant, service: MagicMock) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert 
result["title"] == "mydsm" - assert result["data"][CONF_HOST] == "192.168.1.5" - assert result["data"][CONF_PORT] == 5001 - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") @@ -664,7 +624,9 @@ async def test_options_flow(hass: HomeAssistant, service: MagicMock) -> None: @pytest.mark.usefixtures("mock_setup_entry") -async def test_discovered_via_zeroconf(hass: HomeAssistant, service: MagicMock) -> None: +async def test_discovered_via_zeroconf( + hass: HomeAssistant, service: MagicMock, snapshot: SnapshotAssertion +) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( @@ -697,16 +659,7 @@ async def test_discovered_via_zeroconf(hass: HomeAssistant, service: MagicMock) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == SERIAL assert result["title"] == "mydsm" - assert result["data"][CONF_HOST] == "192.168.1.5" - assert result["data"][CONF_PORT] == 5001 - assert result["data"][CONF_SSL] == DEFAULT_USE_SSL - assert result["data"][CONF_VERIFY_SSL] == DEFAULT_VERIFY_SSL - assert result["data"][CONF_USERNAME] == USERNAME - assert result["data"][CONF_PASSWORD] == PASSWORD - assert result["data"][CONF_MAC] == MACS - assert result["data"].get("device_token") is None - assert result["data"].get(CONF_DISKS) is None - assert result["data"].get(CONF_VOLUMES) is None + assert result["data"] == snapshot @pytest.mark.usefixtures("mock_setup_entry") diff --git a/tests/components/tankerkoenig/conftest.py b/tests/components/tankerkoenig/conftest.py index 
4400082a45f..1a3dcb6f991 100644 --- a/tests/components/tankerkoenig/conftest.py +++ b/tests/components/tankerkoenig/conftest.py @@ -6,20 +6,11 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.tankerkoenig import DOMAIN -from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS -from homeassistant.const import ( - CONF_API_KEY, - CONF_LATITUDE, - CONF_LOCATION, - CONF_LONGITUDE, - CONF_NAME, - CONF_RADIUS, - CONF_SHOW_ON_MAP, -) +from homeassistant.const import CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .const import NEARBY_STATIONS, PRICES, STATION +from .const import CONFIG_DATA, NEARBY_STATIONS, PRICES, STATION from tests.common import MockConfigEntry @@ -55,16 +46,7 @@ async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: options={ CONF_SHOW_ON_MAP: True, }, - data={ - CONF_NAME: "Home", - CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], - CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, - CONF_RADIUS: 2.0, - CONF_STATIONS: [ - "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", - ], - }, + data=CONFIG_DATA, ) diff --git a/tests/components/tankerkoenig/const.py b/tests/components/tankerkoenig/const.py index 9ec64eb79a9..2c28753a7f3 100644 --- a/tests/components/tankerkoenig/const.py +++ b/tests/components/tankerkoenig/const.py @@ -2,6 +2,16 @@ from aiotankerkoenig import PriceInfo, Station, Status +from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS +from homeassistant.const import ( + CONF_API_KEY, + CONF_LATITUDE, + CONF_LOCATION, + CONF_LONGITUDE, + CONF_NAME, + CONF_RADIUS, +) + NEARBY_STATIONS = [ Station( id="3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", @@ -49,6 +59,25 @@ STATION = Station( state="xxXX", ) +STATION_MISSING_FUELTYPE = Station( + id="3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + name="Station ABC", + brand="Station", + 
street="Somewhere Street", + house_number="1", + post_code=1234, + place="Somewhere", + opening_times=[], + overrides=[], + whole_day=True, + is_open=True, + e5=1.719, + e10=1.659, + lat=51.1, + lng=13.1, + state="xxXX", +) + PRICES = { "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8": PriceInfo( status=Status.OPEN, @@ -57,3 +86,22 @@ PRICES = { diesel=1.659, ), } + +PRICES_MISSING_FUELTYPE = { + "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8": PriceInfo( + status=Status.OPEN, + e5=1.719, + e10=1.659, + ), +} + +CONFIG_DATA = { + CONF_NAME: "Home", + CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", + CONF_FUEL_TYPES: ["e5"], + CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, + CONF_RADIUS: 2.0, + CONF_STATIONS: [ + "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + ], +} diff --git a/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr b/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6b454820b05 --- /dev/null +++ b/tests/components/tankerkoenig/snapshots/test_binary_sensor.ambr @@ -0,0 +1,9 @@ +# serializer version: 1 +# name: test_binary_sensor + ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Station Somewhere Street 1 Status', + 'latitude': 51.1, + 'longitude': 13.1, + }) +# --- diff --git a/tests/components/tankerkoenig/snapshots/test_sensor.ambr b/tests/components/tankerkoenig/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..ec9a72e141d --- /dev/null +++ b/tests/components/tankerkoenig/snapshots/test_sensor.ambr @@ -0,0 +1,52 @@ +# serializer version: 1 +# name: test_sensor + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Super E10', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere Street', + 'unit_of_measurement': '€', + }) +# 
--- +# name: test_sensor.1 + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Super', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere Street', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensor.2 + ReadOnlyDict({ + 'attribution': 'Data provided by https://www.tankerkoenig.de', + 'brand': 'Station', + 'city': 'Somewhere', + 'friendly_name': 'Station Somewhere Street 1 Diesel', + 'fuel_type': , + 'house_number': '1', + 'latitude': 51.1, + 'longitude': 13.1, + 'postcode': 1234, + 'state_class': , + 'station_name': 'Station ABC', + 'street': 'Somewhere Street', + 'unit_of_measurement': '€', + }) +# --- diff --git a/tests/components/tankerkoenig/test_binary_sensor.py b/tests/components/tankerkoenig/test_binary_sensor.py new file mode 100644 index 00000000000..c103f2d26ff --- /dev/null +++ b/tests/components/tankerkoenig/test_binary_sensor.py @@ -0,0 +1,25 @@ +"""Tests for the Tankerkoening integration.""" + +from __future__ import annotations + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_ON +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_integration") +async def test_binary_sensor( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the tankerkoenig binary sensors.""" + + state = hass.states.get("binary_sensor.station_somewhere_street_1_status") + assert state + assert state.state == STATE_ON + assert state.attributes == snapshot diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index b255491cb31..022b49fd3f8 100644 --- 
a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for Tankerkoenig config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiotankerkoenig.exceptions import TankerkoenigInvalidKeyError @@ -21,6 +21,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.setup import async_setup_component from .const import NEARBY_STATIONS @@ -208,7 +209,7 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non assert entry.data[CONF_API_KEY] == "269534f6-aaaa-bbbb-cccc-yyyyzzzzxxxx" -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow(hass: HomeAssistant, tankerkoenig: AsyncMock) -> None: """Test options flow.""" mock_config = MockConfigEntry( @@ -218,10 +219,17 @@ async def test_options_flow(hass: HomeAssistant) -> None: unique_id=f"{DOMAIN}_{MOCK_USER_DATA[CONF_LOCATION][CONF_LATITUDE]}_{MOCK_USER_DATA[CONF_LOCATION][CONF_LONGITUDE]}", ) mock_config.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with patch( - "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", - return_value=NEARBY_STATIONS, + with ( + patch( + "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", + return_value=NEARBY_STATIONS, + ), + patch( + "homeassistant.config_entries.ConfigEntries.async_reload" + ) as mock_async_reload, ): result = await hass.config_entries.options.async_init(mock_config.entry_id) assert result["type"] is FlowResultType.FORM @@ -237,6 +245,10 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert not mock_config.options[CONF_SHOW_ON_MAP] + await hass.async_block_till_done() + + assert mock_async_reload.call_count 
== 1 + async def test_options_flow_error(hass: HomeAssistant) -> None: """Test options flow.""" diff --git a/tests/components/tankerkoenig/test_coordinator.py b/tests/components/tankerkoenig/test_coordinator.py index 1e8991f3f9c..3ba0dc31c5f 100644 --- a/tests/components/tankerkoenig/test_coordinator.py +++ b/tests/components/tankerkoenig/test_coordinator.py @@ -15,14 +15,20 @@ import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.tankerkoenig.const import DEFAULT_SCAN_INTERVAL, DOMAIN +from homeassistant.components.tankerkoenig.const import ( + CONF_STATIONS, + DEFAULT_SCAN_INTERVAL, + DOMAIN, +) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ID, CONF_SHOW_ON_MAP, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from .const import CONFIG_DATA + from tests.common import MockConfigEntry, async_fire_time_changed @@ -190,3 +196,38 @@ async def test_automatic_registry_cleanup( len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) == 1 ) + + +async def test_many_stations_warning( + hass: HomeAssistant, tankerkoenig: AsyncMock, caplog: pytest.LogCaptureFixture +) -> None: + """Test the warning about morethan 10 selected stations.""" + mock_config = MockConfigEntry( + domain=DOMAIN, + data={ + **CONFIG_DATA, + CONF_STATIONS: [ + "3bcd61da-xxxx-xxxx-xxxx-19d5523a7ae8", + "36b4b812-xxxx-xxxx-xxxx-c51735325858", + "54e2b642-xxxx-xxxx-xxxx-87cd4e9867f1", + "11b5c130-xxxx-xxxx-xxxx-856b8489b528", + "a9137924-xxxx-xxxx-xxxx-7029d7eb073f", + "57c6d275-xxxx-xxxx-xxxx-7f6ad9e6d638", + "bbc3c3a2-xxxx-xxxx-xxxx-840cc3d496b6", + 
"1db63dd9-xxxx-xxxx-xxxx-a889b53cbc65", + "18d7262e-xxxx-xxxx-xxxx-4a61ad302e14", + "a8041aa3-xxxx-xxxx-xxxx-7c6b180e5a40", + "739aa0eb-xxxx-xxxx-xxxx-a3d7b6c8a42f", + "9ad9fb26-xxxx-xxxx-xxxx-84e6a02b3096", + "74267867-xxxx-xxxx-xxxx-74ce3d45882c", + "86657222-xxxx-xxxx-xxxx-a2b795ab3cf9", + ], + }, + options={CONF_SHOW_ON_MAP: True}, + unique_id="51.0_13.0", + ) + mock_config.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + assert "Found more than 10 stations to check" in caplog.text diff --git a/tests/components/tankerkoenig/test_sensor.py b/tests/components/tankerkoenig/test_sensor.py new file mode 100644 index 00000000000..788c1de7021 --- /dev/null +++ b/tests/components/tankerkoenig/test_sensor.py @@ -0,0 +1,65 @@ +"""Tests for the Tankerkoening integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.tankerkoenig import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import PRICES_MISSING_FUELTYPE, STATION_MISSING_FUELTYPE + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("setup_integration") +async def test_sensor( + hass: HomeAssistant, + tankerkoenig: AsyncMock, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the tankerkoenig sensors.""" + + state = hass.states.get("sensor.station_somewhere_street_1_super_e10") + assert state + assert state.state == "1.659" + assert state.attributes == snapshot + + state = hass.states.get("sensor.station_somewhere_street_1_super") + assert state + assert state.state == "1.719" + assert state.attributes == snapshot + + state = hass.states.get("sensor.station_somewhere_street_1_diesel") + assert state + assert state.state == "1.659" + assert state.attributes == snapshot + + +async def 
test_sensor_missing_fueltype( + hass: HomeAssistant, + tankerkoenig: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test the tankerkoenig sensors.""" + tankerkoenig.station_details.return_value = STATION_MISSING_FUELTYPE + tankerkoenig.prices.return_value = PRICES_MISSING_FUELTYPE + + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + state = hass.states.get("sensor.station_somewhere_street_1_super_e10") + assert state + + state = hass.states.get("sensor.station_somewhere_street_1_super") + assert state + + state = hass.states.get("sensor.station_somewhere_street_1_diesel") + assert not state diff --git a/tests/components/tasmota/test_common.py b/tests/components/tasmota/test_common.py index 360794e280f..499e732719c 100644 --- a/tests/components/tasmota/test_common.py +++ b/tests/components/tasmota/test_common.py @@ -163,7 +163,7 @@ async def help_test_availability_when_connection_lost( # Disconnected from MQTT server -> state changed to unavailable mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -172,7 +172,7 @@ async def help_test_availability_when_connection_lost( # Reconnected to MQTT server -> state still unavailable mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -224,7 +224,7 @@ async def help_test_deep_sleep_availability_when_connection_lost( # Disconnected from MQTT server -> state changed to unavailable mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + 
mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -233,7 +233,7 @@ async def help_test_deep_sleep_availability_when_connection_lost( # Reconnected to MQTT server -> state no longer unavailable mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -476,7 +476,7 @@ async def help_test_availability_poll_state( # Disconnected from MQTT server mqtt_mock.connected = False - await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0) + mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() @@ -484,7 +484,7 @@ async def help_test_availability_poll_state( # Reconnected to MQTT server mqtt_mock.connected = True - await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0) + mqtt_client_mock.on_connect(None, None, None, 0) await hass.async_block_till_done() await hass.async_block_till_done() await hass.async_block_till_done() diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index b731067cd72..ca563cfad77 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -133,7 +133,7 @@ async def test_flow_user_multiple_entries_allowed(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - input = { + user_input = { CONF_URL: "http://1.2.3.5:8181/test", CONF_API_KEY: "efgh", CONF_VERIFY_SSL: True, @@ -141,13 +141,13 @@ async def test_flow_user_multiple_entries_allowed(hass: HomeAssistant) -> None: with patch_config_flow_tautulli(AsyncMock()): result2 = await 
hass.config_entries.flow.async_configure( result["flow_id"], - user_input=input, + user_input=user_input, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == NAME - assert result2["data"] == input + assert result2["data"] == user_input async def test_flow_reauth( diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index f252787b37c..9040ec96a03 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -7,7 +7,23 @@ from unittest.mock import patch import pytest -from .const import LIVE_STATUS, PRODUCTS, RESPONSE_OK, VEHICLE_DATA, WAKE_UP_ONLINE +from .const import ( + LIVE_STATUS, + METADATA, + PRODUCTS, + RESPONSE_OK, + VEHICLE_DATA, + WAKE_UP_ONLINE, +) + + +@pytest.fixture(autouse=True) +def mock_metadata(): + """Mock Tesla Fleet Api metadata method.""" + with patch( + "homeassistant.components.teslemetry.Teslemetry.metadata", return_value=METADATA + ) as mock_products: + yield mock_products @pytest.fixture(autouse=True) diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index 776cc231a5c..96e9ead8912 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -16,3 +16,21 @@ VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) RESPONSE_OK = {"response": {}, "error": None} + +METADATA = { + "region": "NA", + "scopes": [ + "openid", + "offline_access", + "user_data", + "vehicle_device_data", + "vehicle_cmds", + "vehicle_charging_cmds", + "energy_device_data", + "energy_cmds", + ], +} +METADATA_NOSCOPE = { + "region": "NA", + "scopes": ["openid", "offline_access", "vehicle_device_data"], +} diff --git a/tests/components/teslemetry/snapshots/test_sensor.ambr b/tests/components/teslemetry/snapshots/test_sensor.ambr index 81142e40901..0d817ad1f7e 100644 
--- a/tests/components/teslemetry/snapshots/test_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_sensor.ambr @@ -719,7 +719,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -736,7 +738,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery level', 'platform': 'teslemetry', @@ -744,13 +746,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_level', 'unique_id': 'VINVINVIN-charge_state_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -763,7 +768,10 @@ # name: test_sensors[sensor.test_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_battery_level', @@ -778,7 +786,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -794,8 +804,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Battery range', 'platform': 'teslemetry', @@ -803,33 +819,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_battery_range', 'unique_id': 'VINVINVIN-charge_state_battery_range', - 
'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_charge_cable-entry] @@ -843,7 +865,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_cable', 'has_entity_name': True, 'hidden_by': None, @@ -896,7 +918,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -912,8 +936,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge energy added', 'platform': 'teslemetry', @@ -921,13 +948,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_energy_added', 'unique_id': 'VINVINVIN-charge_state_charge_energy_added', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_energy_added-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 
'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -940,7 +970,10 @@ # name: test_sensors[sensor.test_charge_energy_added-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'energy', 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_energy_added', @@ -955,13 +988,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charge_rate', 'has_entity_name': True, 'hidden_by': None, @@ -971,8 +1006,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charge rate', 'platform': 'teslemetry', @@ -980,13 +1018,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charge_rate', 'unique_id': 'VINVINVIN-charge_state_charge_rate', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charge_rate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -999,7 +1040,10 @@ # name: test_sensors[sensor.test_charge_rate-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charge_rate', @@ -1014,13 +1058,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 
'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_current', 'has_entity_name': True, 'hidden_by': None, @@ -1031,7 +1077,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger current', 'platform': 'teslemetry', @@ -1039,13 +1085,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_actual_current', 'unique_id': 'VINVINVIN-charge_state_charger_actual_current', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_current-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1058,7 +1107,10 @@ # name: test_sensors[sensor.test_charger_current-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'current', 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_current', @@ -1073,7 +1125,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1090,7 +1144,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger power', 'platform': 'teslemetry', @@ -1098,13 +1152,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_power', 'unique_id': 'VINVINVIN-charge_state_charger_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 
'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1117,7 +1174,10 @@ # name: test_sensors[sensor.test_charger_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_power', @@ -1132,13 +1192,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_charger_voltage', 'has_entity_name': True, 'hidden_by': None, @@ -1149,7 +1211,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charger voltage', 'platform': 'teslemetry', @@ -1157,13 +1219,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_charger_voltage', 'unique_id': 'VINVINVIN-charge_state_charger_voltage', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_charger_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1176,7 +1241,10 @@ # name: test_sensors[sensor.test_charger_voltage-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_charger_voltage', @@ -1191,7 +1259,16 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + 
]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1208,7 +1285,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Charging', 'platform': 'teslemetry', @@ -1222,27 +1299,45 @@ # name: test_sensors[sensor.test_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_charging-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), }), 'context': , 'entity_id': 'sensor.test_charging', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Stopped', + 'state': 'stopped', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-entry] @@ -1250,7 +1345,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1266,8 +1363,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Distance to arrival', 'platform': 'teslemetry', @@ -1275,26 +1375,32 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_miles_to_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_miles_to_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_distance_to_arrival-state] 
StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_distance_to_arrival', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.039491', + 'state': '0.063555', }) # --- # name: test_sensors[sensor.test_distance_to_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_distance_to_arrival', @@ -1309,13 +1415,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_driver_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1325,8 +1433,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Driver temperature setting', 'platform': 'teslemetry', @@ -1334,13 +1445,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_driver_temp_setting', 'unique_id': 'VINVINVIN-climate_state_driver_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_driver_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1353,7 +1467,10 @@ # name: test_sensors[sensor.test_driver_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 
'device_class': 'temperature', 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_driver_temperature_setting', @@ -1368,7 +1485,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1384,8 +1503,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Estimate battery range', 'platform': 'teslemetry', @@ -1393,33 +1518,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_est_battery_range', 'unique_id': 'VINVINVIN-charge_state_est_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_estimate_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_estimate_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_estimate_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '275.04', + 'state': '442.63397376', }) # --- # name: test_sensors[sensor.test_fast_charger_type-entry] @@ -1433,7 +1564,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 
'sensor.test_fast_charger_type', 'has_entity_name': True, 'hidden_by': None, @@ -1486,7 +1617,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1502,8 +1635,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Ideal battery range', 'platform': 'teslemetry', @@ -1511,33 +1650,39 @@ 'supported_features': 0, 'translation_key': 'charge_state_ideal_battery_range', 'unique_id': 'VINVINVIN-charge_state_ideal_battery_range', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_ideal_battery_range-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_ideal_battery_range-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_ideal_battery_range', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '266.87', + 'state': '429.48563328', }) # --- # name: test_sensors[sensor.test_inside_temperature-entry] @@ -1545,7 +1690,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1561,8 +1708,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Inside temperature', 'platform': 'teslemetry', @@ -1570,13 +1720,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_inside_temp', 'unique_id': 'VINVINVIN-climate_state_inside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_inside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1589,7 +1742,10 @@ # name: test_sensors[sensor.test_inside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_inside_temperature', @@ -1604,13 +1760,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_odometer', 'has_entity_name': True, 'hidden_by': None, @@ -1620,8 +1778,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Odometer', 'platform': 'teslemetry', @@ -1629,33 +1793,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_odometer', 'unique_id': 'VINVINVIN-vehicle_state_odometer', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_odometer-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_odometer-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'distance', 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_odometer', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6481.019282', + 'state': '10430.189495371', }) # --- # name: test_sensors[sensor.test_outside_temperature-entry] @@ -1663,7 +1833,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1679,8 +1851,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Outside temperature', 'platform': 'teslemetry', @@ -1688,13 +1863,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_outside_temp', 'unique_id': 'VINVINVIN-climate_state_outside_temp', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_outside_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1707,7 +1885,10 @@ # name: test_sensors[sensor.test_outside_temperature-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , }), 
'context': , 'entity_id': 'sensor.test_outside_temperature', @@ -1722,13 +1903,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_passenger_temperature_setting', 'has_entity_name': True, 'hidden_by': None, @@ -1738,8 +1921,11 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Passenger temperature setting', 'platform': 'teslemetry', @@ -1747,13 +1933,16 @@ 'supported_features': 0, 'translation_key': 'climate_state_passenger_temp_setting', 'unique_id': 'VINVINVIN-climate_state_passenger_temp_setting', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_passenger_temperature_setting-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1766,7 +1955,10 @@ # name: test_sensors[sensor.test_passenger_temperature_setting-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_passenger_temperature_setting', @@ -1781,13 +1973,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_power', 'has_entity_name': True, 
'hidden_by': None, @@ -1798,7 +1992,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Power', 'platform': 'teslemetry', @@ -1806,13 +2000,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_power', 'unique_id': 'VINVINVIN-drive_state_power', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_power', @@ -1825,7 +2022,10 @@ # name: test_sensors[sensor.test_power-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'power', 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_power', @@ -1840,7 +2040,14 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1857,7 +2064,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Shift state', 'platform': 'teslemetry', @@ -1871,27 +2078,41 @@ # name: test_sensors[sensor.test_shift_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_shift_state-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'enum', 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), }), 'context': , 
'entity_id': 'sensor.test_shift_state', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'p', }) # --- # name: test_sensors[sensor.test_speed-entry] @@ -1899,7 +2120,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -1915,8 +2138,11 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Speed', 'platform': 'teslemetry', @@ -1924,33 +2150,39 @@ 'supported_features': 0, 'translation_key': 'drive_state_speed', 'unique_id': 'VINVINVIN-drive_state_speed', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_speed-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_speed-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'speed', 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_speed', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] @@ -1958,13 +2190,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', 'has_entity_name': True, 'hidden_by': None, 
@@ -1975,7 +2209,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'State of charge at arrival', 'platform': 'teslemetry', @@ -1983,13 +2217,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_energy_at_arrival', 'unique_id': 'VINVINVIN-drive_state_active_route_energy_at_arrival', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_state_of_charge_at_arrival-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2002,7 +2239,10 @@ # name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_state_of_charge_at_arrival', @@ -2139,13 +2379,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'has_entity_name': True, 'hidden_by': None, @@ -2155,8 +2397,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front left', 'platform': 'teslemetry', @@ -2164,33 +2412,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fl', 'unique_id': 
'VINVINVIN-vehicle_state_tpms_pressure_fl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_left-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-entry] @@ -2198,13 +2452,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'has_entity_name': True, 'hidden_by': None, @@ -2214,8 +2470,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure front right', 'platform': 'teslemetry', @@ -2223,33 +2485,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_fr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_fr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: 
test_sensors[sensor.test_tire_pressure_front_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_front_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_front_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.8', + 'state': '40.6105682912393', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-entry] @@ -2257,13 +2525,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'has_entity_name': True, 'hidden_by': None, @@ -2273,8 +2543,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear left', 'platform': 'teslemetry', @@ -2282,33 +2558,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rl', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rl', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 
'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_left', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-entry] @@ -2316,13 +2598,15 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'has_entity_name': True, 'hidden_by': None, @@ -2332,8 +2616,14 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Tire pressure rear right', 'platform': 'teslemetry', @@ -2341,33 +2631,39 @@ 'supported_features': 0, 'translation_key': 'vehicle_state_tpms_pressure_rr', 'unique_id': 'VINVINVIN-vehicle_state_tpms_pressure_rr', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 
'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_tire_pressure_rear_right', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2.775', + 'state': '40.2479739314961', }) # --- # name: test_sensors[sensor.test_traffic_delay-entry] @@ -2375,7 +2671,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -2392,7 +2690,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Traffic delay', 'platform': 'teslemetry', @@ -2400,13 +2698,16 @@ 'supported_features': 0, 'translation_key': 'drive_state_active_route_traffic_minutes_delay', 'unique_id': 'VINVINVIN-drive_state_active_route_traffic_minutes_delay', - 'unit_of_measurement': None, + 'unit_of_measurement': , }) # --- # name: test_sensors[sensor.test_traffic_delay-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2419,7 +2720,10 @@ # name: test_sensors[sensor.test_traffic_delay-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'duration', 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.test_traffic_delay', @@ -2434,7 +2738,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 
'config_entry_id': , 'device_class': None, 'device_id': , @@ -2451,7 +2757,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Usable battery level', 'platform': 'teslemetry', @@ -2459,13 +2765,16 @@ 'supported_features': 0, 'translation_key': 'charge_state_usable_battery_level', 'unique_id': 'VINVINVIN-charge_state_usable_battery_level', - 'unit_of_measurement': None, + 'unit_of_measurement': '%', }) # --- # name: test_sensors[sensor.test_usable_battery_level-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', @@ -2478,7 +2787,10 @@ # name: test_sensors[sensor.test_usable_battery_level-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'device_class': 'battery', 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', }), 'context': , 'entity_id': 'sensor.test_usable_battery_level', diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index e83e9d648cd..a05bc07b305 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -22,11 +22,11 @@ from homeassistant.components.climate import ( from homeassistant.components.teslemetry.coordinator import SYNC_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . 
import assert_entities, setup_platform -from .const import WAKE_UP_ASLEEP, WAKE_UP_ONLINE +from .const import METADATA_NOSCOPE, WAKE_UP_ASLEEP, WAKE_UP_ONLINE from tests.common import async_fire_time_changed @@ -176,3 +176,30 @@ async def test_asleep_or_offline( ) await hass.async_block_till_done() mock_wake_up.assert_called_once() + + +async def test_climate_noscope( + hass: HomeAssistant, + mock_metadata, +) -> None: + """Tests that the climate entity is correct.""" + mock_metadata.return_value = METADATA_NOSCOPE + + await setup_platform(hass, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.HEAT_COOL}, + blocking=True, + ) + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, + blocking=True, + ) diff --git a/tests/components/tessie/test_lock.py b/tests/components/tessie/test_lock.py index ca921583d97..0371b592f07 100644 --- a/tests/components/tessie/test_lock.py +++ b/tests/components/tessie/test_lock.py @@ -15,8 +15,9 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED, Pl from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.issue_registry import async_get as async_get_issue_registry -from .common import assert_entities, setup_platform +from .common import DOMAIN, assert_entities, setup_platform async def test_locks( @@ -24,6 +25,17 @@ async def test_locks( ) -> None: """Tests that the lock entity is correct.""" + # Create the deprecated speed limit lock entity + entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + 
original_name="Charge cable lock", + has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + disabled_by=er.RegistryEntryDisabler.INTEGRATION, + ) + entry = await setup_platform(hass, [Platform.LOCK]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -72,19 +84,47 @@ async def test_locks( assert hass.states.get(entity_id).state == STATE_UNLOCKED mock_run.assert_called_once() + +async def test_speed_limit_lock( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Tests that the deprecated speed limit lock entity is correct.""" + + issue_registry = async_get_issue_registry(hass) + + # Create the deprecated speed limit lock entity + entity = entity_registry.async_get_or_create( + LOCK_DOMAIN, + DOMAIN, + "VINVINVIN-vehicle_state_speed_limit_mode_active", + original_name="Charge cable lock", + has_entity_name=True, + translation_key="vehicle_state_speed_limit_mode_active", + ) + + with patch( + "homeassistant.components.tessie.lock.automations_with_entity", + return_value=["item"], + ): + await setup_platform(hass, [Platform.LOCK]) + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_speed_limit_{entity.entity_id}_item" + ) + # Test lock set value functions - entity_id = "lock.test_speed_limit" with patch( "homeassistant.components.tessie.lock.enable_speed_limit" ) as mock_enable_speed_limit: await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "1234"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_LOCKED + assert hass.states.get(entity.entity_id).state == STATE_LOCKED mock_enable_speed_limit.assert_called_once() + # Assert issue has been raised in the issue register + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_locked") with patch( "homeassistant.components.tessie.lock.disable_speed_limit" @@ -92,16 +132,17 @@ async def test_locks( await 
hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "1234"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "1234"}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_UNLOCKED + assert hass.states.get(entity.entity_id).state == STATE_UNLOCKED mock_disable_speed_limit.assert_called_once() + assert issue_registry.async_get_issue(DOMAIN, "deprecated_speed_limit_unlocked") with pytest.raises(ServiceValidationError): await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: [entity_id], ATTR_CODE: "abc"}, + {ATTR_ENTITY_ID: [entity.entity_id], ATTR_CODE: "abc"}, blocking=True, ) diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..8261cd74859 --- /dev/null +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,117 @@ +# serializer version: 1 +# name: test_attributes[alarm_control_panel.test-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.test', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456', + 'unit_of_measurement': None, + }) +# --- +# name: test_attributes[alarm_control_panel.test-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'ac_loss': False, + 'changed_by': None, + 'code_arm_required': True, + 'code_format': None, + 'cover_tampered': False, + 
'friendly_name': 'test', + 'location_id': '123456', + 'location_name': 'test', + 'low_battery': False, + 'partition': 1, + 'supported_features': , + 'triggered_source': None, + 'triggered_zone': None, + }), + 'context': , + 'entity_id': 'alarm_control_panel.test', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_attributes[alarm_control_panel.test_partition_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.test_partition_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Partition 2', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'partition', + 'unique_id': '123456_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_attributes[alarm_control_panel.test_partition_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'ac_loss': False, + 'changed_by': None, + 'code_arm_required': True, + 'code_format': None, + 'cover_tampered': False, + 'friendly_name': 'test Partition 2', + 'location_id': '123456', + 'location_name': 'test partition 2', + 'low_battery': False, + 'partition': 2, + 'supported_features': , + 'triggered_source': None, + 'triggered_zone': None, + }), + 'context': , + 'entity_id': 'alarm_control_panel.test_partition_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..54089c6f192 --- 
/dev/null +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1095 @@ +# serializer version: 1 +# name: test_entity_registry[binary_sensor.fire-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.fire', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.fire-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'Fire', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.fire_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fire_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_low_battery', + 'unit_of_measurement': None, + 
}) +# --- +# name: test_entity_registry[binary_sensor.fire_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Fire Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.fire_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.fire_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_2_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.fire_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Fire Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '2', + }), + 'context': , + 'entity_id': 'binary_sensor.fire_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.gas-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.gas', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'gas', + 'friendly_name': 'Gas', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.gas_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.gas_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Gas Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.gas_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.gas_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_3_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.gas_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Gas Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '3', + }), + 'context': , + 'entity_id': 'binary_sensor.gas_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.medical-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.medical', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_5_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.medical-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'safety', + 'friendly_name': 'Medical', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '5', + }), + 'context': , + 
'entity_id': 'binary_sensor.medical', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Motion', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 'entity_id': 'binary_sensor.motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.motion_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_low_battery', + 
'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Motion Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 'entity_id': 'binary_sensor.motion_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.motion_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.motion_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_4_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.motion_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Motion Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '4', + }), + 'context': , + 'entity_id': 'binary_sensor.motion_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.security-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.security', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_1_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Security', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_registry[binary_sensor.security_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.security_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_1_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Security Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_entity_registry[binary_sensor.security_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.security_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_1_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.security_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Security Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '1', + }), + 'context': , + 'entity_id': 'binary_sensor.security_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Temperature', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.temperature_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Temperature Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.temperature_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_7_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.temperature_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Temperature Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': 7, + }), + 'context': , + 'entity_id': 'binary_sensor.temperature_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'test Battery', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'test Power', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'test Tamper', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tamper', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.unknown', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.unknown-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Unknown', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.unknown_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_low_battery', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_entity_registry[binary_sensor.unknown_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Unknown Battery', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.unknown_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_6_tamper', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.unknown_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Unknown Tamper', + 'location_id': '123456', + 'partition': '1', + 'zone_id': '6', + }), + 'context': , + 'entity_id': 'binary_sensor.unknown_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/totalconnect/test_alarm_control_panel.py b/tests/components/totalconnect/test_alarm_control_panel.py index fa2e997756d..176fe54c34a 100644 --- a/tests/components/totalconnect/test_alarm_control_panel.py +++ b/tests/components/totalconnect/test_alarm_control_panel.py @@ -4,6 +4,7 @@ from datetime import timedelta from unittest.mock import patch import pytest +from syrupy import 
SnapshotAssertion from total_connect_client.exceptions import ServiceUnavailable, TotalConnectError from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN @@ -14,7 +15,6 @@ from homeassistant.components.totalconnect.alarm_control_panel import ( ) from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_FRIENDLY_NAME, SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, @@ -36,7 +36,6 @@ from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util from .common import ( - LOCATION_ID, RESPONSE_ARM_FAILURE, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY, @@ -58,7 +57,7 @@ from .common import ( setup_platform, ) -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform ENTITY_ID = "alarm_control_panel.test" ENTITY_ID_2 = "alarm_control_panel.test_partition_2" @@ -67,28 +66,20 @@ DATA = {ATTR_ENTITY_ID: ENTITY_ID} DELAY = timedelta(seconds=10) -async def test_attributes(hass: HomeAssistant) -> None: +async def test_attributes( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test the alarm control panel attributes are correct.""" - await setup_platform(hass, ALARM_DOMAIN) + entry = await setup_platform(hass, ALARM_DOMAIN) with patch( "homeassistant.components.totalconnect.TotalConnectClient.request", return_value=RESPONSE_DISARMED, ) as mock_request: await async_update_entity(hass, ENTITY_ID) await hass.async_block_till_done() - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_ALARM_DISARMED mock_request.assert_called_once() - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "test" - entity_registry = er.async_get(hass) - entry = entity_registry.async_get(ENTITY_ID) - # TotalConnect partition #1 alarm device unique_id is the location_id - assert entry.unique_id == LOCATION_ID - - entry2 = entity_registry.async_get(ENTITY_ID_2) - # TotalConnect 
partition #2 unique_id is the location_id + "_{partition_number}" - assert entry2.unique_id == LOCATION_ID + "_2" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) assert mock_request.call_count == 1 diff --git a/tests/components/totalconnect/test_binary_sensor.py b/tests/components/totalconnect/test_binary_sensor.py index 8ff548850d9..dc433129ac8 100644 --- a/tests/components/totalconnect/test_binary_sensor.py +++ b/tests/components/totalconnect/test_binary_sensor.py @@ -2,6 +2,8 @@ from unittest.mock import patch +from syrupy import SnapshotAssertion + from homeassistant.components.binary_sensor import ( DOMAIN as BINARY_SENSOR, BinarySensorDeviceClass, @@ -10,41 +12,25 @@ from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import LOCATION_ID, RESPONSE_DISARMED, ZONE_NORMAL, setup_platform +from .common import RESPONSE_DISARMED, ZONE_NORMAL, setup_platform + +from tests.common import snapshot_platform ZONE_ENTITY_ID = "binary_sensor.security" -ZONE_LOW_BATTERY_ID = "binary_sensor.security_low_battery" +ZONE_LOW_BATTERY_ID = "binary_sensor.security_battery" ZONE_TAMPER_ID = "binary_sensor.security_tamper" -PANEL_BATTERY_ID = "binary_sensor.test_low_battery" +PANEL_BATTERY_ID = "binary_sensor.test_battery" PANEL_TAMPER_ID = "binary_sensor.test_tamper" PANEL_POWER_ID = "binary_sensor.test_power" -async def test_entity_registry(hass: HomeAssistant) -> None: +async def test_entity_registry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: """Test the binary sensor is registered in entity registry.""" - await setup_platform(hass, BINARY_SENSOR) - entity_registry = er.async_get(hass) + entry = await setup_platform(hass, BINARY_SENSOR) - # ensure zone 1 plus two diagnostic zones are created - entry = entity_registry.async_get(ZONE_ENTITY_ID) - entry_low_battery = 
entity_registry.async_get(ZONE_LOW_BATTERY_ID) - entry_tamper = entity_registry.async_get(ZONE_TAMPER_ID) - - assert entry.unique_id == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_zone" - assert ( - entry_low_battery.unique_id - == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_low_battery" - ) - assert entry_tamper.unique_id == f"{LOCATION_ID}_{ZONE_NORMAL['ZoneID']}_tamper" - - # ensure panel diagnostic zones are created - panel_battery = entity_registry.async_get(PANEL_BATTERY_ID) - panel_tamper = entity_registry.async_get(PANEL_TAMPER_ID) - panel_power = entity_registry.async_get(PANEL_POWER_ID) - - assert panel_battery.unique_id == f"{LOCATION_ID}_low_battery" - assert panel_tamper.unique_id == f"{LOCATION_ID}_tamper" - assert panel_power.unique_id == f"{LOCATION_ID}_power" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_state_and_attributes(hass: HomeAssistant) -> None: @@ -63,7 +49,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: ) assert state.attributes.get("device_class") == BinarySensorDeviceClass.DOOR - state = hass.states.get(f"{ZONE_ENTITY_ID}_low_battery") + state = hass.states.get(f"{ZONE_ENTITY_ID}_battery") assert state.state == STATE_OFF state = hass.states.get(f"{ZONE_ENTITY_ID}_tamper") assert state.state == STATE_OFF @@ -72,7 +58,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.fire") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.SMOKE - state = hass.states.get("binary_sensor.fire_low_battery") + state = hass.states.get("binary_sensor.fire_battery") assert state.state == STATE_ON state = hass.states.get("binary_sensor.fire_tamper") assert state.state == STATE_OFF @@ -81,7 +67,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.gas") assert state.state == STATE_OFF assert state.attributes.get("device_class") == 
BinarySensorDeviceClass.GAS - state = hass.states.get("binary_sensor.gas_low_battery") + state = hass.states.get("binary_sensor.gas_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.gas_tamper") assert state.state == STATE_ON @@ -90,7 +76,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.unknown") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.DOOR - state = hass.states.get("binary_sensor.unknown_low_battery") + state = hass.states.get("binary_sensor.unknown_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.unknown_tamper") assert state.state == STATE_OFF @@ -99,7 +85,7 @@ async def test_state_and_attributes(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.temperature") assert state.state == STATE_OFF assert state.attributes.get("device_class") == BinarySensorDeviceClass.PROBLEM - state = hass.states.get("binary_sensor.temperature_low_battery") + state = hass.states.get("binary_sensor.temperature_battery") assert state.state == STATE_OFF state = hass.states.get("binary_sensor.temperature_tamper") assert state.state == STATE_OFF diff --git a/tests/components/traccar_server/snapshots/test_diagnostics.ambr b/tests/components/traccar_server/snapshots/test_diagnostics.ambr index 300444f10f1..39e67db8df7 100644 --- a/tests/components/traccar_server/snapshots/test_diagnostics.ambr +++ b/tests/components/traccar_server/snapshots/test_diagnostics.ambr @@ -73,7 +73,30 @@ 'entities': list([ dict({ 'disabled': False, - 'enity_id': 'device_tracker.x_wing', + 'entity_id': 'binary_sensor.x_wing_motion', + 'state': dict({ + 'attributes': dict({ + 'device_class': 'motion', + 'friendly_name': 'X-Wing Motion', + }), + 'state': 'off', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'binary_sensor.x_wing_status', + 'state': dict({ + 'attributes': 
dict({ + 'friendly_name': 'X-Wing Status', + }), + 'state': 'on', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'device_tracker.x_wing', 'state': dict({ 'attributes': dict({ 'category': 'starfighter', @@ -82,9 +105,7 @@ 'gps_accuracy': 3.5, 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', - 'motion': False, 'source_type': 'gps', - 'status': 'online', 'traccar_id': 0, 'tracker': 'traccar_server', }), @@ -94,7 +115,31 @@ }), dict({ 'disabled': False, - 'enity_id': 'sensor.x_wing_battery', + 'entity_id': 'sensor.x_wing_address', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Address', + }), + 'state': '**REDACTED**', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'sensor.x_wing_altitude', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Altitude', + 'state_class': 'measurement', + 'unit_of_measurement': 'm', + }), + 'state': '546841384638', + }), + 'unit_of_measurement': 'm', + }), + dict({ + 'disabled': False, + 'entity_id': 'sensor.x_wing_battery', 'state': dict({ 'attributes': dict({ 'device_class': 'battery', @@ -108,7 +153,18 @@ }), dict({ 'disabled': False, - 'enity_id': 'sensor.x_wing_speed', + 'entity_id': 'sensor.x_wing_geofence', + 'state': dict({ + 'attributes': dict({ + 'friendly_name': 'X-Wing Geofence', + }), + 'state': 'Tatooine', + }), + 'unit_of_measurement': None, + }), + dict({ + 'disabled': False, + 'entity_id': 'sensor.x_wing_speed', 'state': dict({ 'attributes': dict({ 'device_class': 'speed', @@ -120,41 +176,6 @@ }), 'unit_of_measurement': 'kn', }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_altitude', - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'X-Wing Altitude', - 'state_class': 'measurement', - 'unit_of_measurement': 'm', - }), - 'state': '546841384638', - }), - 'unit_of_measurement': 'm', - }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_address', - 'state': dict({ - 
'attributes': dict({ - 'friendly_name': 'X-Wing Address', - }), - 'state': '**REDACTED**', - }), - 'unit_of_measurement': None, - }), - dict({ - 'disabled': False, - 'enity_id': 'sensor.x_wing_geofence', - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'X-Wing Geofence', - }), - 'state': 'Tatooine', - }), - 'unit_of_measurement': None, - }), ]), 'subscription_status': 'disconnected', }) @@ -233,39 +254,51 @@ 'entities': list([ dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_battery', + 'entity_id': 'binary_sensor.x_wing_motion', 'state': None, - 'unit_of_measurement': '%', + 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_speed', + 'entity_id': 'binary_sensor.x_wing_status', 'state': None, - 'unit_of_measurement': 'kn', + 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_altitude', + 'entity_id': 'device_tracker.x_wing', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_address', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_altitude', 'state': None, 'unit_of_measurement': 'm', }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_address', + 'entity_id': 'sensor.x_wing_battery', + 'state': None, + 'unit_of_measurement': '%', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_geofence', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_geofence', + 'entity_id': 'sensor.x_wing_speed', 'state': None, - 'unit_of_measurement': None, - }), - dict({ - 'disabled': True, - 'enity_id': 'device_tracker.x_wing', - 'state': None, - 'unit_of_measurement': None, + 'unit_of_measurement': 'kn', }), ]), 'subscription_status': 'disconnected', @@ -345,37 +378,19 @@ 'entities': list([ dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_battery', - 'state': None, - 'unit_of_measurement': '%', - 
}), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_speed', - 'state': None, - 'unit_of_measurement': 'kn', - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_altitude', - 'state': None, - 'unit_of_measurement': 'm', - }), - dict({ - 'disabled': True, - 'enity_id': 'sensor.x_wing_address', + 'entity_id': 'binary_sensor.x_wing_motion', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': True, - 'enity_id': 'sensor.x_wing_geofence', + 'entity_id': 'binary_sensor.x_wing_status', 'state': None, 'unit_of_measurement': None, }), dict({ 'disabled': False, - 'enity_id': 'device_tracker.x_wing', + 'entity_id': 'device_tracker.x_wing', 'state': dict({ 'attributes': dict({ 'category': 'starfighter', @@ -384,9 +399,7 @@ 'gps_accuracy': 3.5, 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', - 'motion': False, 'source_type': 'gps', - 'status': 'online', 'traccar_id': 0, 'tracker': 'traccar_server', }), @@ -394,6 +407,36 @@ }), 'unit_of_measurement': None, }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_address', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_altitude', + 'state': None, + 'unit_of_measurement': 'm', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_battery', + 'state': None, + 'unit_of_measurement': '%', + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_geofence', + 'state': None, + 'unit_of_measurement': None, + }), + dict({ + 'disabled': True, + 'entity_id': 'sensor.x_wing_speed', + 'state': None, + 'unit_of_measurement': 'kn', + }), ]), 'subscription_status': 'disconnected', }) diff --git a/tests/components/traccar_server/test_diagnostics.py b/tests/components/traccar_server/test_diagnostics.py index 493f0ae92d1..9019cd0ebf1 100644 --- a/tests/components/traccar_server/test_diagnostics.py +++ b/tests/components/traccar_server/test_diagnostics.py @@ -33,6 +33,10 @@ async def test_entry_diagnostics( 
hass_client, mock_config_entry, ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name="entry") @@ -64,13 +68,17 @@ async def test_device_diagnostics( device_id=device.id, include_disabled_entities=True, ) - # Enable all entitits to show everything in snapshots + # Enable all entities to show everything in snapshots for entity in entities: entity_registry.async_update_entity(entity.entity_id, disabled_by=None) result = await get_diagnostics_for_device( hass, hass_client, mock_config_entry, device=device ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name=device.name) @@ -110,5 +118,9 @@ async def test_device_diagnostics_with_disabled_entity( result = await get_diagnostics_for_device( hass, hass_client, mock_config_entry, device=device ) + # Sort the list of entities + result["entities"] = sorted( + result["entities"], key=lambda entity: entity["entity_id"] + ) assert result == snapshot(name=device.name) diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 5c5d882b721..f2cfb6a109f 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -133,12 +133,12 @@ async def test_get_trace( ) -> None: """Test tracing a script or automation.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -429,12 +429,12 @@ async def test_restore_traces( ) -> None: """Test restored traces.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id saved_traces = 
json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -522,7 +522,7 @@ async def test_trace_overflow( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, stored_traces ) -> None: """Test the number of stored traces per script or automation is limited.""" - id = 1 + msg_id = 1 trace_uuids = [] @@ -532,9 +532,9 @@ async def test_trace_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -601,7 +601,7 @@ async def test_restore_traces_overflow( ) -> None: """Test restored traces are evicted first.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 trace_uuids = [] @@ -611,9 +611,9 @@ async def test_restore_traces_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id saved_traces = json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -682,7 +682,7 @@ async def test_restore_traces_late_overflow( ) -> None: """Test restored traces are evicted first.""" hass.set_state(CoreState.not_running) - id = 1 + msg_id = 1 trace_uuids = [] @@ -692,9 +692,9 @@ async def test_restore_traces_late_overflow( return trace_uuids[-1] def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id saved_traces = json.loads(load_fixture(f"trace/{domain}_saved_traces.json")) hass_storage["trace.saved_traces"] = saved_traces @@ -743,12 +743,12 @@ async def test_trace_no_traces( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain ) -> None: """Test the storing traces for a script or automation can be disabled.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -810,12 +810,12 @@ async def test_list_traces( ) -> None: 
"""Test listing script and automation traces.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -943,12 +943,12 @@ async def test_nested_traces( extra_trace_keys, ) -> None: """Test nested automation and script traces.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id sun_config = { "id": "sun", @@ -1003,12 +1003,12 @@ async def test_breakpoints( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test script and automation breakpoints.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1173,12 +1173,12 @@ async def test_breakpoints_2( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test execution resumes and breakpoints are removed after subscription removed.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1278,12 +1278,12 @@ async def test_breakpoints_3( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, domain, prefix ) -> None: """Test breakpoints can be cleared.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id async def assert_last_step(item_id, expected_action, expected_state): await client.send_json( @@ -1434,12 +1434,12 @@ async def test_script_mode( script_execution, ) -> None: """Test overlapping runs with max_runs > 1.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return 
msg_id flag = asyncio.Event() @@ -1502,12 +1502,12 @@ async def test_script_mode_2( script_execution, ) -> None: """Test overlapping runs with max_runs > 1.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id flag = asyncio.Event() @@ -1577,12 +1577,12 @@ async def test_trace_blueprint_automation( ) -> None: """Test trace of blueprint automation.""" await async_setup_component(hass, "homeassistant", {}) - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id domain = "automation" sun_config = { diff --git a/tests/components/tradfri/conftest.py b/tests/components/tradfri/conftest.py index 9ddac769c1f..73cfea59ce1 100644 --- a/tests/components/tradfri/conftest.py +++ b/tests/components/tradfri/conftest.py @@ -96,13 +96,13 @@ def device( return device -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def air_purifier() -> str: """Return an air purifier response.""" return load_fixture("air_purifier.json", DOMAIN) -@pytest.fixture(scope="session") +@pytest.fixture(scope="package") def blind() -> str: """Return a blind response.""" return load_fixture("blind.json", DOMAIN) diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 794d4d5e773..6642807ac3f 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -17,16 +17,16 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: """Validate that setup entry also configure the client.""" client = ClientMock() - id = str(uuid4()) + device_id = str(uuid4()) config_entry = MockConfigEntry( domain=TWINKLY_DOMAIN, data={ CONF_HOST: TEST_HOST, - CONF_ID: id, + CONF_ID: device_id, CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, - entry_id=id, + entry_id=device_id, ) config_entry.add_to_hass(hass) diff --git a/tests/components/unifi/test_init.py 
b/tests/components/unifi/test_init.py index 9053b47cbaf..bd9a29f2c8b 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -3,10 +3,20 @@ from typing import Any from unittest.mock import patch +from aiounifi.models.message import MessageKey + +from homeassistant import loader from homeassistant.components import unifi -from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN +from homeassistant.components.unifi.const import ( + CONF_ALLOW_BANDWIDTH_SENSORS, + CONF_ALLOW_UPTIME_SENSORS, + CONF_TRACK_CLIENTS, + CONF_TRACK_DEVICES, + DOMAIN as UNIFI_DOMAIN, +) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from .test_hub import DEFAULT_CONFIG_ENTRY_ID, setup_unifi_integration @@ -103,3 +113,91 @@ async def test_wireless_clients( "00:00:00:00:00:01", "00:00:00:00:00:02", ] + + +async def test_remove_config_entry_device( + hass: HomeAssistant, + hass_storage: dict[str, Any], + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, + mock_unifi_websocket, +) -> None: + """Verify removing a device manually.""" + client_1 = { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes": 1234000000, + "wired-tx_bytes": 5678000000, + "uptime": 1600094505, + } + client_2 = { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes": 2345000000, + "tx_bytes": 6789000000, + "uptime": 60, + } + device_1 = { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device 1", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": 
True, + "version": "4.0.42.10433", + } + options = { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: True, + CONF_TRACK_CLIENTS: True, + CONF_TRACK_DEVICES: True, + } + + config_entry = await setup_unifi_integration( + hass, + aioclient_mock, + options=options, + clients_response=[client_1, client_2], + devices_response=[device_1], + ) + + integration = await loader.async_get_integration(hass, config_entry.domain) + component = await integration.async_get_component() + + # Remove a client + mock_unifi_websocket(message=MessageKey.CLIENT_REMOVED, data=[client_2]) + await hass.async_block_till_done() + + # Try to remove an active client: not allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, client_1["mac"])} + ) + assert not await component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) + # Try to remove an active device: not allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, device_1["mac"])} + ) + assert not await component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) + # Try to remove an inactive client: allowed + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, client_2["mac"])} + ) + assert await component.async_remove_config_entry_device( + hass, config_entry, device_entry + ) diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index e8f9f763409..26eadfa498e 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1000,77 +1000,6 @@ async def test_device_state( assert hass.states.get("sensor.device_state").state == DEVICE_STATES[i] -async def test_wlan_password( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_unifi_websocket, - websocket_mock, -) -> None: - """Test the WLAN password sensor behavior.""" - 
await setup_unifi_integration(hass, aioclient_mock, wlans_response=[WLAN]) - - sensor_password = "sensor.ssid_1_password" - password = "password" - new_password = "new_password" - - ent_reg_entry = entity_registry.async_get(sensor_password) - assert ent_reg_entry.unique_id == "password-012345678910111213141516" - assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - - # Enable entity - entity_registry.async_update_entity(entity_id=sensor_password, disabled_by=None) - await hass.async_block_till_done() - - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), - ) - await hass.async_block_till_done() - - # Validate state object - wlan_password_sensor_1 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state == password - - # Update state object - same password - no change to state - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=WLAN) - await hass.async_block_till_done() - wlan_password_sensor_2 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state == wlan_password_sensor_2.state - - # Update state object - changed password - new state - data = deepcopy(WLAN) - data["x_passphrase"] = new_password - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=data) - await hass.async_block_till_done() - wlan_password_sensor_3 = hass.states.get(sensor_password) - assert wlan_password_sensor_1.state != wlan_password_sensor_3.state - - # Availability signaling - - # Controller disconnects - await websocket_mock.disconnect() - assert hass.states.get(sensor_password).state == STATE_UNAVAILABLE - - # Controller reconnects - await websocket_mock.reconnect() - assert hass.states.get(sensor_password).state == new_password - - # WLAN gets disabled - wlan_1 = deepcopy(WLAN) - wlan_1["enabled"] = False - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await 
hass.async_block_till_done() - assert hass.states.get(sensor_password).state == STATE_UNAVAILABLE - - # WLAN gets re-enabled - wlan_1["enabled"] = True - mock_unifi_websocket(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() - assert hass.states.get(sensor_password).state == password - - async def test_device_system_stats( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -1113,3 +1042,152 @@ async def test_device_system_stats( assert hass.states.get("sensor.device_cpu_utilization").state == "7.7" assert hass.states.get("sensor.device_memory_utilization").state == "33.3" + + +async def test_bandwidth_port_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, + mock_unifi_websocket, +) -> None: + """Verify that port bandwidth sensors are working as expected.""" + device_reponse = { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "port_table": [ + { + "media": "GE", + "name": "Port 1", + "port_idx": 1, + "poe_class": "Class 4", + "poe_enable": False, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a1", + "port_poe": False, + "up": True, + "rx_bytes-r": 1151, + "tx_bytes-r": 5111, + }, + { + "media": "GE", + "name": "Port 2", + "port_idx": 2, + "poe_class": "Class 4", + "poe_enable": False, + "poe_mode": "auto", + "poe_power": "2.56", + "poe_voltage": "53.40", + "portconf_id": "1a2", + "port_poe": False, + "up": True, + "rx_bytes-r": 1536, + "tx_bytes-r": 3615, + }, + ], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + options = { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: False, + CONF_TRACK_CLIENTS: False, + CONF_TRACK_DEVICES: False, + } + + config_entry = await setup_unifi_integration( + hass, + aioclient_mock, + options=options, + 
devices_response=[device_reponse], + ) + + assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx") + assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + + p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx") + assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC + + # Enable entity + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_1_rx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_1_tx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_2_rx", disabled_by=None + ) + entity_registry.async_update_entity( + entity_id="sensor.mock_name_port_2_tx", disabled_by=None + ) + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + # Validate state object + assert len(hass.states.async_all()) == 9 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 + + # Verify sensor attributes and state + p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx") + assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1rx_sensor.state == "0.00921" + + p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx") + assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p1tx_sensor.state == "0.04089" + + p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx") + assert 
p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2rx_sensor.state == "0.01229" + + p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx") + assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE + assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + assert p2tx_sensor.state == "0.02892" + + # Verify state update + device_reponse["port_table"][0]["rx_bytes-r"] = 3456000000 + device_reponse["port_table"][0]["tx_bytes-r"] = 7891000000 + + mock_unifi_websocket(message=MessageKey.DEVICE, data=device_reponse) + await hass.async_block_till_done() + + assert hass.states.get("sensor.mock_name_port_1_rx").state == "27648.00000" + assert hass.states.get("sensor.mock_name_port_1_tx").state == "63128.00000" + + # Disable option + options[CONF_ALLOW_BANDWIDTH_SENSORS] = False + hass.config_entries.async_update_entry(config_entry, options=options.copy()) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + assert hass.states.get("sensor.mock_name_uptime") + assert hass.states.get("sensor.mock_name_state") + assert hass.states.get("sensor.mock_name_port_1_rx") is None + assert hass.states.get("sensor.mock_name_port_1_tx") is None + assert hass.states.get("sensor.mock_name_port_2_rx") is None + assert hass.states.get("sensor.mock_name_port_2_tx") is None diff --git a/tests/components/usb/__init__.py b/tests/components/usb/__init__.py index f5f32336931..96d671d0958 100644 --- a/tests/components/usb/__init__.py +++ b/tests/components/usb/__init__.py @@ -26,3 +26,19 @@ electro_lama_device = USBDevice( manufacturer=None, description="USB2.0-Serial", ) +skyconnect_macos_correct = USBDevice( + device="/dev/cu.SLAB_USBtoUART", + vid="10C4", + pid="EA60", + serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", + 
manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) +skyconnect_macos_incorrect = USBDevice( + device="/dev/cu.usbserial-2110", + vid="10C4", + pid="EA60", + serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index 6ce36b38c8f..783ed8b4585 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -37,7 +37,7 @@ class MockInput: def get_mock_inputs(input_list): """Return list of MockInput.""" - return [MockInput(input) for input in input_list] + return [MockInput(device_input) for device_input in input_list] @pytest.fixture(name="vizio_get_unique_id", autouse=True) diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index d5ce18eb8b9..8cc734b9188 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -28,6 +28,8 @@ from homeassistant.components.media_player import ( ATTR_SOUND_MODE, DOMAIN as MP_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, @@ -443,6 +445,8 @@ async def test_services( "eq", "Music", ) + await _test_service(hass, MP_DOMAIN, "play", SERVICE_MEDIA_PLAY, None) + await _test_service(hass, MP_DOMAIN, "pause", SERVICE_MEDIA_PAUSE, None) async def test_options_update( diff --git a/tests/components/withings/test_init.py b/tests/components/withings/test_init.py index ff0a098a7cb..3ade0fb7c3a 100644 --- a/tests/components/withings/test_init.py +++ b/tests/components/withings/test_init.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import AsyncMock, patch from urllib.parse import urlparse +from aiohttp import ClientConnectionError from aiohttp.hdrs import METH_HEAD from aiowithings import ( NotificationCategory, @@ -425,6 +426,110 @@ async def 
test_cloud_disconnect( assert withings.subscribe_notification.call_count == 12 +async def test_internet_disconnect( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we can recover from internet disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object(cloud, "async_active_subscription", return_value=True), + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + "homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert withings.revoke_notification_configurations.call_count == 3 + assert withings.subscribe_notification.call_count == 6 + + await hass.async_block_till_done() + + withings.list_notification_configurations.side_effect = ClientConnectionError + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert withings.revoke_notification_configurations.call_count == 3 + + async_mock_cloud_connection_status(hass, True) + await hass.async_block_till_done() + + assert withings.subscribe_notification.call_count == 12 + + +async def test_cloud_disconnect_retry( + hass: HomeAssistant, + withings: AsyncMock, + webhook_config_entry: MockConfigEntry, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we 
retry to create webhook connection again after cloud disconnects.""" + await mock_cloud(hass) + await hass.async_block_till_done() + + with ( + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), + patch.object(cloud, "async_is_connected", return_value=True), + patch.object( + cloud, "async_active_subscription", return_value=True + ) as mock_async_active_subscription, + patch( + "homeassistant.components.cloud.async_create_cloudhook", + return_value="https://hooks.nabu.casa/ABCD", + ), + patch( + "homeassistant.components.withings.async_get_config_entry_implementation", + ), + patch( + "homeassistant.components.cloud.async_delete_cloudhook", + ), + patch( + "homeassistant.components.withings.webhook_generate_url", + ), + ): + await setup_integration(hass, webhook_config_entry) + await prepare_webhook_setup(hass, freezer) + + assert cloud.async_active_subscription(hass) is True + assert cloud.async_is_connected(hass) is True + assert mock_async_active_subscription.call_count == 3 + + await hass.async_block_till_done() + + async_mock_cloud_connection_status(hass, False) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 3 + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_async_active_subscription.call_count == 4 + + @pytest.mark.parametrize( ("body", "expected_code"), [ diff --git a/tests/components/withings/test_sensor.py b/tests/components/withings/test_sensor.py index 72da4b9d973..8966006e47f 100644 --- a/tests/components/withings/test_sensor.py +++ b/tests/components/withings/test_sensor.py @@ -21,7 +21,7 @@ from . 
import ( setup_integration, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.freeze_time("2023-10-21") @@ -36,15 +36,10 @@ async def test_all_entities( """Test all entities.""" with patch("homeassistant.components.withings.PLATFORMS", [Platform.SENSOR]): await setup_integration(hass, polling_config_entry) - entity_entries = er.async_entries_for_config_entry( - entity_registry, polling_config_entry.entry_id - ) - assert entity_entries - for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") - assert (state := hass.states.get(entity_entry.entity_id)) - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + await snapshot_platform( + hass, entity_registry, snapshot, polling_config_entry.entry_id + ) async def test_update_failed( diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 816fc922411..211367a2922 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -56,7 +56,7 @@ async def load_config_entry( return (config_entry, client) -@pytest.fixture(name="load_json", scope="session") +@pytest.fixture(name="load_json", scope="package") def load_json_from_fixture() -> dict[str, Any]: """Load fixture with json data and return.""" diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index 5e128cc464a..5b57ec7fcc2 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -12,7 +12,7 @@ from zigpy.application import ControllerApplication import zigpy.backups from zigpy.exceptions import NetworkSettingsInconsistent -from homeassistant.components.homeassistant_sky_connect import ( +from homeassistant.components.homeassistant_sky_connect.const import ( DOMAIN as SKYCONNECT_DOMAIN, ) from 
homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN @@ -59,8 +59,10 @@ def test_detect_radio_hardware(hass: HomeAssistant) -> None: "pid": "EA60", "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", "manufacturer": "Nabu Casa", - "description": "SkyConnect v1.0", + "product": "SkyConnect v1.0", + "firmware": "ezsp", }, + version=2, domain=SKYCONNECT_DOMAIN, options={}, title="Home Assistant SkyConnect", @@ -74,8 +76,10 @@ def test_detect_radio_hardware(hass: HomeAssistant) -> None: "pid": "EA60", "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", "manufacturer": "Nabu Casa", - "description": "Home Assistant Connect ZBT-1", + "product": "Home Assistant Connect ZBT-1", + "firmware": "ezsp", }, + version=2, domain=SKYCONNECT_DOMAIN, options={}, title="Home Assistant Connect ZBT-1", diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index dbf7357d4a0..db92b89cf81 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -241,19 +241,19 @@ def create_backup_fixture(): # State fixtures -@pytest.fixture(name="controller_state", scope="session") +@pytest.fixture(name="controller_state", scope="package") def controller_state_fixture(): """Load the controller state fixture data.""" return json.loads(load_fixture("zwave_js/controller_state.json")) -@pytest.fixture(name="controller_node_state", scope="session") +@pytest.fixture(name="controller_node_state", scope="package") def controller_node_state_fixture(): """Load the controller node state fixture data.""" return json.loads(load_fixture("zwave_js/controller_node_state.json")) -@pytest.fixture(name="version_state", scope="session") +@pytest.fixture(name="version_state", scope="package") def version_state_fixture(): """Load the version state fixture data.""" return { @@ -276,67 +276,67 @@ def log_config_state_fixture(): } -@pytest.fixture(name="config_entry_diagnostics", scope="session") 
+@pytest.fixture(name="config_entry_diagnostics", scope="package") def config_entry_diagnostics_fixture(): """Load the config entry diagnostics fixture data.""" return json.loads(load_fixture("zwave_js/config_entry_diagnostics.json")) -@pytest.fixture(name="config_entry_diagnostics_redacted", scope="session") +@pytest.fixture(name="config_entry_diagnostics_redacted", scope="package") def config_entry_diagnostics_redacted_fixture(): """Load the redacted config entry diagnostics fixture data.""" return json.loads(load_fixture("zwave_js/config_entry_diagnostics_redacted.json")) -@pytest.fixture(name="multisensor_6_state", scope="session") +@pytest.fixture(name="multisensor_6_state", scope="package") def multisensor_6_state_fixture(): """Load the multisensor 6 node state fixture data.""" return json.loads(load_fixture("zwave_js/multisensor_6_state.json")) -@pytest.fixture(name="ecolink_door_sensor_state", scope="session") +@pytest.fixture(name="ecolink_door_sensor_state", scope="package") def ecolink_door_sensor_state_fixture(): """Load the Ecolink Door/Window Sensor node state fixture data.""" return json.loads(load_fixture("zwave_js/ecolink_door_sensor_state.json")) -@pytest.fixture(name="hank_binary_switch_state", scope="session") +@pytest.fixture(name="hank_binary_switch_state", scope="package") def binary_switch_state_fixture(): """Load the hank binary switch node state fixture data.""" return json.loads(load_fixture("zwave_js/hank_binary_switch_state.json")) -@pytest.fixture(name="bulb_6_multi_color_state", scope="session") +@pytest.fixture(name="bulb_6_multi_color_state", scope="package") def bulb_6_multi_color_state_fixture(): """Load the bulb 6 multi-color node state fixture data.""" return json.loads(load_fixture("zwave_js/bulb_6_multi_color_state.json")) -@pytest.fixture(name="light_color_null_values_state", scope="session") +@pytest.fixture(name="light_color_null_values_state", scope="package") def light_color_null_values_state_fixture(): """Load the light 
color null values node state fixture data.""" return json.loads(load_fixture("zwave_js/light_color_null_values_state.json")) -@pytest.fixture(name="eaton_rf9640_dimmer_state", scope="session") +@pytest.fixture(name="eaton_rf9640_dimmer_state", scope="package") def eaton_rf9640_dimmer_state_fixture(): """Load the eaton rf9640 dimmer node state fixture data.""" return json.loads(load_fixture("zwave_js/eaton_rf9640_dimmer_state.json")) -@pytest.fixture(name="lock_schlage_be469_state", scope="session") +@pytest.fixture(name="lock_schlage_be469_state", scope="package") def lock_schlage_be469_state_fixture(): """Load the schlage lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_schlage_be469_state.json")) -@pytest.fixture(name="lock_august_asl03_state", scope="session") +@pytest.fixture(name="lock_august_asl03_state", scope="package") def lock_august_asl03_state_fixture(): """Load the August Pro lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_august_asl03_state.json")) -@pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="session") +@pytest.fixture(name="climate_radio_thermostat_ct100_plus_state", scope="package") def climate_radio_thermostat_ct100_plus_state_fixture(): """Load the climate radio thermostat ct100 plus node state fixture data.""" return json.loads( @@ -346,7 +346,7 @@ def climate_radio_thermostat_ct100_plus_state_fixture(): @pytest.fixture( name="climate_radio_thermostat_ct100_plus_different_endpoints_state", - scope="session", + scope="package", ) def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): """Load the thermostat fixture state with values on different endpoints. 
@@ -360,13 +360,13 @@ def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture(): ) -@pytest.fixture(name="climate_adc_t3000_state", scope="session") +@pytest.fixture(name="climate_adc_t3000_state", scope="package") def climate_adc_t3000_state_fixture(): """Load the climate ADC-T3000 node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_adc_t3000_state.json")) -@pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="session") +@pytest.fixture(name="climate_airzone_aidoo_control_hvac_unit_state", scope="package") def climate_airzone_aidoo_control_hvac_unit_state_fixture(): """Load the climate Airzone Aidoo Control HVAC Unit state fixture data.""" return json.loads( @@ -374,37 +374,37 @@ def climate_airzone_aidoo_control_hvac_unit_state_fixture(): ) -@pytest.fixture(name="climate_danfoss_lc_13_state", scope="session") +@pytest.fixture(name="climate_danfoss_lc_13_state", scope="package") def climate_danfoss_lc_13_state_fixture(): """Load Danfoss (LC-13) electronic radiator thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_danfoss_lc_13_state.json")) -@pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="session") +@pytest.fixture(name="climate_eurotronic_spirit_z_state", scope="package") def climate_eurotronic_spirit_z_state_fixture(): """Load the climate Eurotronic Spirit Z thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_eurotronic_spirit_z_state.json")) -@pytest.fixture(name="climate_heatit_z_trm6_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm6_state", scope="package") def climate_heatit_z_trm6_state_fixture(): """Load the climate HEATIT Z-TRM6 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm6_state.json")) -@pytest.fixture(name="climate_heatit_z_trm3_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm3_state", scope="package") 
def climate_heatit_z_trm3_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm3_state.json")) -@pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm2fx_state", scope="package") def climate_heatit_z_trm2fx_state_fixture(): """Load the climate HEATIT Z-TRM2fx thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/climate_heatit_z_trm2fx_state.json")) -@pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="session") +@pytest.fixture(name="climate_heatit_z_trm3_no_value_state", scope="package") def climate_heatit_z_trm3_no_value_state_fixture(): """Load the climate HEATIT Z-TRM3 thermostat node w/no value state fixture data.""" return json.loads( @@ -412,134 +412,134 @@ def climate_heatit_z_trm3_no_value_state_fixture(): ) -@pytest.fixture(name="nortek_thermostat_state", scope="session") +@pytest.fixture(name="nortek_thermostat_state", scope="package") def nortek_thermostat_state_fixture(): """Load the nortek thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/nortek_thermostat_state.json")) -@pytest.fixture(name="srt321_hrt4_zw_state", scope="session") +@pytest.fixture(name="srt321_hrt4_zw_state", scope="package") def srt321_hrt4_zw_state_fixture(): """Load the climate HRT4-ZW / SRT321 / SRT322 thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/srt321_hrt4_zw_state.json")) -@pytest.fixture(name="chain_actuator_zws12_state", scope="session") +@pytest.fixture(name="chain_actuator_zws12_state", scope="package") def window_cover_state_fixture(): """Load the window cover node state fixture data.""" return json.loads(load_fixture("zwave_js/chain_actuator_zws12_state.json")) -@pytest.fixture(name="fan_generic_state", scope="session") +@pytest.fixture(name="fan_generic_state", scope="package") def fan_generic_state_fixture(): 
"""Load the fan node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_generic_state.json")) -@pytest.fixture(name="hs_fc200_state", scope="session") +@pytest.fixture(name="hs_fc200_state", scope="package") def hs_fc200_state_fixture(): """Load the HS FC200+ node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_hs_fc200_state.json")) -@pytest.fixture(name="leviton_zw4sf_state", scope="session") +@pytest.fixture(name="leviton_zw4sf_state", scope="package") def leviton_zw4sf_state_fixture(): """Load the Leviton ZW4SF node state fixture data.""" return json.loads(load_fixture("zwave_js/leviton_zw4sf_state.json")) -@pytest.fixture(name="fan_honeywell_39358_state", scope="session") +@pytest.fixture(name="fan_honeywell_39358_state", scope="package") def fan_honeywell_39358_state_fixture(): """Load the fan node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_honeywell_39358_state.json")) -@pytest.fixture(name="gdc_zw062_state", scope="session") +@pytest.fixture(name="gdc_zw062_state", scope="package") def motorized_barrier_cover_state_fixture(): """Load the motorized barrier cover node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_zw062_state.json")) -@pytest.fixture(name="iblinds_v2_state", scope="session") +@pytest.fixture(name="iblinds_v2_state", scope="package") def iblinds_v2_state_fixture(): """Load the iBlinds v2 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_iblinds_v2_state.json")) -@pytest.fixture(name="iblinds_v3_state", scope="session") +@pytest.fixture(name="iblinds_v3_state", scope="package") def iblinds_v3_state_fixture(): """Load the iBlinds v3 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) -@pytest.fixture(name="qubino_shutter_state", scope="session") +@pytest.fixture(name="qubino_shutter_state", scope="package") def qubino_shutter_state_fixture(): """Load the Qubino Shutter node state fixture 
data.""" return json.loads(load_fixture("zwave_js/cover_qubino_shutter_state.json")) -@pytest.fixture(name="aeotec_nano_shutter_state", scope="session") +@pytest.fixture(name="aeotec_nano_shutter_state", scope="package") def aeotec_nano_shutter_state_fixture(): """Load the Aeotec Nano Shutter node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_aeotec_nano_shutter_state.json")) -@pytest.fixture(name="fibaro_fgr222_shutter_state", scope="session") +@pytest.fixture(name="fibaro_fgr222_shutter_state", scope="package") def fibaro_fgr222_shutter_state_fixture(): """Load the Fibaro FGR222 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_fibaro_fgr222_state.json")) -@pytest.fixture(name="fibaro_fgr223_shutter_state", scope="session") +@pytest.fixture(name="fibaro_fgr223_shutter_state", scope="package") def fibaro_fgr223_shutter_state_fixture(): """Load the Fibaro FGR223 node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_fibaro_fgr223_state.json")) -@pytest.fixture(name="merten_507801_state", scope="session") +@pytest.fixture(name="merten_507801_state", scope="package") def merten_507801_state_fixture(): """Load the Merten 507801 Shutter node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_merten_507801_state.json")) -@pytest.fixture(name="aeon_smart_switch_6_state", scope="session") +@pytest.fixture(name="aeon_smart_switch_6_state", scope="package") def aeon_smart_switch_6_state_fixture(): """Load the AEON Labs (ZW096) Smart Switch 6 node state fixture data.""" return json.loads(load_fixture("zwave_js/aeon_smart_switch_6_state.json")) -@pytest.fixture(name="ge_12730_state", scope="session") +@pytest.fixture(name="ge_12730_state", scope="package") def ge_12730_state_fixture(): """Load the GE 12730 node state fixture data.""" return json.loads(load_fixture("zwave_js/fan_ge_12730_state.json")) -@pytest.fixture(name="aeotec_radiator_thermostat_state", scope="session") 
+@pytest.fixture(name="aeotec_radiator_thermostat_state", scope="package") def aeotec_radiator_thermostat_state_fixture(): """Load the Aeotec Radiator Thermostat node state fixture data.""" return json.loads(load_fixture("zwave_js/aeotec_radiator_thermostat_state.json")) -@pytest.fixture(name="inovelli_lzw36_state", scope="session") +@pytest.fixture(name="inovelli_lzw36_state", scope="package") def inovelli_lzw36_state_fixture(): """Load the Inovelli LZW36 node state fixture data.""" return json.loads(load_fixture("zwave_js/inovelli_lzw36_state.json")) -@pytest.fixture(name="null_name_check_state", scope="session") +@pytest.fixture(name="null_name_check_state", scope="package") def null_name_check_state_fixture(): """Load the null name check node state fixture data.""" return json.loads(load_fixture("zwave_js/null_name_check_state.json")) -@pytest.fixture(name="lock_id_lock_as_id150_state", scope="session") +@pytest.fixture(name="lock_id_lock_as_id150_state", scope="package") def lock_id_lock_as_id150_state_fixture(): """Load the id lock id-150 lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_id_lock_as_id150_state.json")) @pytest.fixture( - name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="session" + name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package" ) def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): """Load the climate multiple temp units node state fixture data.""" @@ -554,7 +554,7 @@ def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture(): name=( "climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state" ), - scope="session", + scope="package", ) def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_state_fixture(): """Load climate device w/ mode+setpoint on diff endpoints node state fixture data.""" @@ -565,37 +565,37 @@ def climate_radio_thermostat_ct100_mode_and_setpoint_on_different_endpoints_stat 
) -@pytest.fixture(name="vision_security_zl7432_state", scope="session") +@pytest.fixture(name="vision_security_zl7432_state", scope="package") def vision_security_zl7432_state_fixture(): """Load the vision security zl7432 switch node state fixture data.""" return json.loads(load_fixture("zwave_js/vision_security_zl7432_state.json")) -@pytest.fixture(name="zen_31_state", scope="session") +@pytest.fixture(name="zen_31_state", scope="package") def zem_31_state_fixture(): """Load the zen_31 node state fixture data.""" return json.loads(load_fixture("zwave_js/zen_31_state.json")) -@pytest.fixture(name="wallmote_central_scene_state", scope="session") +@pytest.fixture(name="wallmote_central_scene_state", scope="package") def wallmote_central_scene_state_fixture(): """Load the wallmote central scene node state fixture data.""" return json.loads(load_fixture("zwave_js/wallmote_central_scene_state.json")) -@pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="session") +@pytest.fixture(name="ge_in_wall_dimmer_switch_state", scope="package") def ge_in_wall_dimmer_switch_state_fixture(): """Load the ge in-wall dimmer switch node state fixture data.""" return json.loads(load_fixture("zwave_js/ge_in_wall_dimmer_switch_state.json")) -@pytest.fixture(name="aeotec_zw164_siren_state", scope="session") +@pytest.fixture(name="aeotec_zw164_siren_state", scope="package") def aeotec_zw164_siren_state_fixture(): """Load the aeotec zw164 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/aeotec_zw164_siren_state.json")) -@pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="session") +@pytest.fixture(name="lock_popp_electric_strike_lock_control_state", scope="package") def lock_popp_electric_strike_lock_control_state_fixture(): """Load the popp electric strike lock control node state fixture data.""" return json.loads( @@ -603,73 +603,73 @@ def lock_popp_electric_strike_lock_control_state_fixture(): ) 
-@pytest.fixture(name="fortrezz_ssa1_siren_state", scope="session") +@pytest.fixture(name="fortrezz_ssa1_siren_state", scope="package") def fortrezz_ssa1_siren_state_fixture(): """Load the fortrezz ssa1 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/fortrezz_ssa1_siren_state.json")) -@pytest.fixture(name="fortrezz_ssa3_siren_state", scope="session") +@pytest.fixture(name="fortrezz_ssa3_siren_state", scope="package") def fortrezz_ssa3_siren_state_fixture(): """Load the fortrezz ssa3 siren node state fixture data.""" return json.loads(load_fixture("zwave_js/fortrezz_ssa3_siren_state.json")) -@pytest.fixture(name="zp3111_not_ready_state", scope="session") +@pytest.fixture(name="zp3111_not_ready_state", scope="package") def zp3111_not_ready_state_fixture(): """Load the zp3111 4-in-1 sensor not-ready node state fixture data.""" return json.loads(load_fixture("zwave_js/zp3111-5_not_ready_state.json")) -@pytest.fixture(name="zp3111_state", scope="session") +@pytest.fixture(name="zp3111_state", scope="package") def zp3111_state_fixture(): """Load the zp3111 4-in-1 sensor node state fixture data.""" return json.loads(load_fixture("zwave_js/zp3111-5_state.json")) -@pytest.fixture(name="express_controls_ezmultipli_state", scope="session") +@pytest.fixture(name="express_controls_ezmultipli_state", scope="package") def light_express_controls_ezmultipli_state_fixture(): """Load the Express Controls EZMultiPli node state fixture data.""" return json.loads(load_fixture("zwave_js/express_controls_ezmultipli_state.json")) -@pytest.fixture(name="lock_home_connect_620_state", scope="session") +@pytest.fixture(name="lock_home_connect_620_state", scope="package") def lock_home_connect_620_state_fixture(): """Load the Home Connect 620 lock node state fixture data.""" return json.loads(load_fixture("zwave_js/lock_home_connect_620_state.json")) -@pytest.fixture(name="switch_zooz_zen72_state", scope="session") +@pytest.fixture(name="switch_zooz_zen72_state", 
scope="package") def switch_zooz_zen72_state_fixture(): """Load the Zooz Zen72 switch node state fixture data.""" return json.loads(load_fixture("zwave_js/switch_zooz_zen72_state.json")) -@pytest.fixture(name="indicator_test_state", scope="session") +@pytest.fixture(name="indicator_test_state", scope="package") def indicator_test_state_fixture(): """Load the indicator CC test node state fixture data.""" return json.loads(load_fixture("zwave_js/indicator_test_state.json")) -@pytest.fixture(name="energy_production_state", scope="session") +@pytest.fixture(name="energy_production_state", scope="package") def energy_production_state_fixture(): """Load a mock node with energy production CC state fixture data.""" return json.loads(load_fixture("zwave_js/energy_production_state.json")) -@pytest.fixture(name="nice_ibt4zwave_state", scope="session") +@pytest.fixture(name="nice_ibt4zwave_state", scope="package") def nice_ibt4zwave_state_fixture(): """Load a Nice IBT4ZWAVE cover node state fixture data.""" return json.loads(load_fixture("zwave_js/cover_nice_ibt4zwave_state.json")) -@pytest.fixture(name="logic_group_zdb5100_state", scope="session") +@pytest.fixture(name="logic_group_zdb5100_state", scope="package") def logic_group_zdb5100_state_fixture(): """Load the Logic Group ZDB5100 node state fixture data.""" return json.loads(load_fixture("zwave_js/logic_group_zdb5100_state.json")) -@pytest.fixture(name="central_scene_node_state", scope="session") +@pytest.fixture(name="central_scene_node_state", scope="package") def central_scene_node_state_fixture(): """Load node with Central Scene CC node state fixture data.""" return json.loads(load_fixture("zwave_js/central_scene_node_state.json")) diff --git a/tests/components/zwave_js/test_diagnostics.py b/tests/components/zwave_js/test_diagnostics.py index 054906cd0f6..ea354ab80d3 100644 --- a/tests/components/zwave_js/test_diagnostics.py +++ b/tests/components/zwave_js/test_diagnostics.py @@ -128,7 +128,9 @@ async def 
test_device_diagnostics( ) assert diagnostics_data["state"] == { **multisensor_6.data, - "values": {id: val.data for id, val in multisensor_6.values.items()}, + "values": { + value_id: val.data for value_id, val in multisensor_6.values.items() + }, "endpoints": { str(idx): endpoint.data for idx, endpoint in multisensor_6.endpoints.items() }, diff --git a/tests/conftest.py b/tests/conftest.py index a38da17f44b..7efd4246a1f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -499,7 +499,7 @@ def aiohttp_client( elif isinstance(__param, BaseTestServer): client = TestClient(__param, loop=loop, **kwargs) else: - raise TypeError("Unknown argument type: %r" % type(__param)) + raise TypeError(f"Unknown argument type: {type(__param)!r}") await client.start_server() clients.append(client) @@ -542,8 +542,8 @@ async def hass( else: exceptions.append( Exception( - "Received exception handler without exception, but with message: %s" - % context["message"] + "Received exception handler without exception, " + f"but with message: {context["message"]}" ) ) orig_exception_handler(loop, context) @@ -904,26 +904,45 @@ def mqtt_client_mock(hass: HomeAssistant) -> Generator[MqttMockPahoClient, None, self.rc = 0 with patch("paho.mqtt.client.Client") as mock_client: + # The below use a call_soon for the on_publish/on_subscribe/on_unsubscribe + # callbacks to simulate the behavior of the real MQTT client which will + # not be synchronous. 
@ha.callback def _async_fire_mqtt_message(topic, payload, qos, retain): async_fire_mqtt_message(hass, topic, payload, qos, retain) mid = get_mid() - mock_client.on_publish(0, 0, mid) + hass.loop.call_soon(mock_client.on_publish, 0, 0, mid) return FakeInfo(mid) def _subscribe(topic, qos=0): mid = get_mid() - mock_client.on_subscribe(0, 0, mid) + hass.loop.call_soon(mock_client.on_subscribe, 0, 0, mid) return (0, mid) def _unsubscribe(topic): mid = get_mid() - mock_client.on_unsubscribe(0, 0, mid) + hass.loop.call_soon(mock_client.on_unsubscribe, 0, 0, mid) return (0, mid) + def _connect(*args, **kwargs): + # Connect always calls reconnect once, but we + # mock it out so we call reconnect to simulate + # the behavior. + mock_client.reconnect() + hass.loop.call_soon_threadsafe( + mock_client.on_connect, mock_client, None, 0, 0, 0 + ) + mock_client.on_socket_open( + mock_client, None, Mock(fileno=Mock(return_value=-1)) + ) + mock_client.on_socket_register_write( + mock_client, None, Mock(fileno=Mock(return_value=-1)) + ) + return 0 + mock_client = mock_client.return_value - mock_client.connect.return_value = 0 + mock_client.connect.side_effect = _connect mock_client.subscribe.side_effect = _subscribe mock_client.unsubscribe.side_effect = _unsubscribe mock_client.publish.side_effect = _async_fire_mqtt_message @@ -985,6 +1004,7 @@ async def _mqtt_mock_entry( # connected set to True to get a more realistic behavior when subscribing mock_mqtt_instance.connected = True + mqtt_client_mock.on_connect(mqtt_client_mock, None, 0, 0, 0) async_dispatcher_send(hass, mqtt.MQTT_CONNECTED) await hass.async_block_till_done() diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index 701bc342760..20dea85c3e4 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -2178,12 +2178,12 @@ def _find_run_id(traces, trace_type, item_id): async def assert_automation_condition_trace(hass_ws_client, automation_id, expected): """Test the 
result of automation condition.""" - id = 1 + msg_id = 1 def next_id(): - nonlocal id - id += 1 - return id + nonlocal msg_id + msg_id += 1 + return msg_id client = await hass_ws_client() diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index bed3dea4dc1..ee895e3fd3e 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -11,7 +11,7 @@ from yarl import URL from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STARTED -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, HomeAssistant, ReleaseChannel from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( area_registry as ar, @@ -2390,7 +2390,7 @@ async def test_device_name_translation_placeholders( }, }, {"placeholder": "special"}, - "stable", + ReleaseChannel.STABLE, nullcontext(), ( "has translation placeholders '{'placeholder': 'special'}' which do " @@ -2405,7 +2405,7 @@ async def test_device_name_translation_placeholders( }, }, {"placeholder": "special"}, - "beta", + ReleaseChannel.BETA, pytest.raises( HomeAssistantError, match="Missing placeholder '2ndplaceholder'" ), @@ -2419,7 +2419,7 @@ async def test_device_name_translation_placeholders( }, }, None, - "stable", + ReleaseChannel.STABLE, nullcontext(), ( "has translation placeholders '{}' which do " @@ -2434,7 +2434,7 @@ async def test_device_name_translation_placeholders_errors( translation_key: str | None, translations: dict[str, str] | None, placeholders: dict[str, str] | None, - release_channel: str, + release_channel: ReleaseChannel, expectation: AbstractContextManager, expected_error: str, caplog: pytest.LogCaptureFixture, diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index 149231a9368..d9a79cc6a7a 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -239,3 +239,24 @@ async def 
test_dispatcher_add_dispatcher(hass: HomeAssistant) -> None: async_dispatcher_send(hass, "test", 5) assert calls == [3, 4, 4, 5, 5] + + +async def test_thread_safety_checks(hass: HomeAssistant) -> None: + """Test dispatcher thread safety checks.""" + hass.config.debug = True + calls = [] + + @callback + def _dispatcher(data): + calls.append(data) + + async_dispatcher_connect(hass, "test", _dispatcher) + + with pytest.raises( + RuntimeError, + match="Detected code that calls async_dispatcher_send from a thread.", + ): + await hass.async_add_executor_job(async_dispatcher_send, hass, "test", 3) + + async_dispatcher_send(hass, "test", 4) + assert calls == [4] diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 70d917dbc7b..a80674e0f76 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -28,6 +28,7 @@ from homeassistant.core import ( HassJobType, HomeAssistant, HomeAssistantError, + ReleaseChannel, callback, ) from homeassistant.helpers import device_registry as dr, entity, entity_registry as er @@ -1249,7 +1250,7 @@ async def test_entity_name_translation_placeholders( }, }, {"placeholder": "special"}, - "stable", + ReleaseChannel.STABLE, ( "has translation placeholders '{'placeholder': 'special'}' which do " "not match the name '{placeholder} English ent {2ndplaceholder}'" @@ -1263,7 +1264,7 @@ async def test_entity_name_translation_placeholders( }, }, {"placeholder": "special"}, - "beta", + ReleaseChannel.BETA, "HomeAssistantError: Missing placeholder '2ndplaceholder'", ), ( @@ -1274,7 +1275,7 @@ async def test_entity_name_translation_placeholders( }, }, None, - "stable", + ReleaseChannel.STABLE, ( "has translation placeholders '{}' which do " "not match the name '{placeholder} English ent'" @@ -1287,7 +1288,7 @@ async def test_entity_name_translation_placeholder_errors( translation_key: str | None, translations: dict[str, str] | None, placeholders: dict[str, str] | None, - release_channel: str, + release_channel: 
ReleaseChannel, expected_error: str, caplog: pytest.LogCaptureFixture, ) -> None: @@ -2329,30 +2330,30 @@ async def test_cached_entity_properties( async def test_cached_entity_property_delete_attr(hass: HomeAssistant) -> None: """Test deleting an _attr corresponding to a cached property.""" - property = "has_entity_name" + property_name = "has_entity_name" ent = entity.Entity() - assert not hasattr(ent, f"_attr_{property}") + assert not hasattr(ent, f"_attr_{property_name}") with pytest.raises(AttributeError): - delattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False with pytest.raises(AttributeError): - delattr(ent, f"_attr_{property}") - assert not hasattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, f"_attr_{property_name}") + assert not hasattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False - setattr(ent, f"_attr_{property}", True) - assert getattr(ent, property) is True + setattr(ent, f"_attr_{property_name}", True) + assert getattr(ent, property_name) is True - delattr(ent, f"_attr_{property}") - assert not hasattr(ent, f"_attr_{property}") - assert getattr(ent, property) is False + delattr(ent, f"_attr_{property_name}") + assert not hasattr(ent, f"_attr_{property_name}") + assert getattr(ent, property_name) is False async def test_cached_entity_property_class_attribute(hass: HomeAssistant) -> None: """Test entity properties on class level work in derived classes.""" - property = "attribution" + property_name = "attribution" values = ["abcd", "efgh"] class EntityWithClassAttribute1(entity.Entity): @@ -2407,15 +2408,15 @@ async def test_cached_entity_property_class_attribute(hass: HomeAssistant) -> No ] for ent in entities: - assert getattr(ent[0], property) == values[0] - assert getattr(ent[1], property) == values[0] + assert getattr(ent[0], property_name) == values[0] + assert 
getattr(ent[1], property_name) == values[0] # Test update for ent in entities: - setattr(ent[0], f"_attr_{property}", values[1]) + setattr(ent[0], f"_attr_{property_name}", values[1]) for ent in entities: - assert getattr(ent[0], property) == values[1] - assert getattr(ent[1], property) == values[0] + assert getattr(ent[0], property_name) == values[1] + assert getattr(ent[1], property_name) == values[0] async def test_cached_entity_property_override(hass: HomeAssistant) -> None: @@ -2593,3 +2594,50 @@ async def test_get_hassjob_type(hass: HomeAssistant) -> None: assert ent_1.get_hassjob_type("update") is HassJobType.Executor assert ent_1.get_hassjob_type("async_update") is HassJobType.Coroutinefunction assert ent_1.get_hassjob_type("update_callback") is HassJobType.Callback + + +async def test_async_write_ha_state_thread_safety(hass: HomeAssistant) -> None: + """Test async_write_ha_state thread safety.""" + hass.config.debug = True + + ent = entity.Entity() + ent.entity_id = "test.any" + ent.hass = hass + ent.async_write_ha_state() + assert hass.states.get(ent.entity_id) + + ent2 = entity.Entity() + ent2.entity_id = "test.any2" + ent2.hass = hass + with pytest.raises( + RuntimeError, + match="Detected code that calls async_write_ha_state from a thread.", + ): + await hass.async_add_executor_job(ent2.async_write_ha_state) + assert not hass.states.get(ent2.entity_id) + + +async def test_async_write_ha_state_thread_safety_custom_component( + hass: HomeAssistant, +) -> None: + """Test async_write_ha_state thread safe for custom components.""" + + ent = entity.Entity() + ent._is_custom_component = True + ent.entity_id = "test.any" + ent.hass = hass + ent.platform = MockEntityPlatform(hass, domain="test") + ent.async_write_ha_state() + assert hass.states.get(ent.entity_id) + + ent2 = entity.Entity() + ent2._is_custom_component = True + ent2.entity_id = "test.any2" + ent2.hass = hass + ent2.platform = MockEntityPlatform(hass, domain="test") + with pytest.raises( + 
RuntimeError, + match="Detected code that calls async_write_ha_state from a thread.", + ): + await hass.async_add_executor_job(ent2.async_write_ha_state) + assert not hass.states.get(ent2.entity_id) diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index fe215264f59..904bed965c8 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -205,3 +205,45 @@ async def test_report_missing_integration_frame( frame.report(what, error_if_core=False, log_custom_component_only=True) assert caplog.text == "" + + +@pytest.mark.parametrize("run_count", [1, 2]) +# Run this twice to make sure the flood check does not +# kick in when error_if_integration=True +async def test_report_error_if_integration( + caplog: pytest.LogCaptureFixture, run_count: int +) -> None: + """Test RuntimeError is raised if error_if_integration is set.""" + frames = extract_stack_to_frame( + [ + Mock( + filename="/home/paulus/homeassistant/core.py", + lineno="23", + line="do_something()", + ), + Mock( + filename="/home/paulus/homeassistant/components/hue/light.py", + lineno="23", + line="self.light.is_on", + ), + Mock( + filename="/home/paulus/aiohue/lights.py", + lineno="2", + line="something()", + ), + ] + ) + with ( + patch( + "homeassistant.helpers.frame.get_current_frame", + return_value=frames, + ), + pytest.raises( + RuntimeError, + match=( + "Detected that integration 'hue' did a bad" + " thing at homeassistant/components/hue/light.py" + ), + ), + ): + frame.report("did a bad thing", error_if_integration=True) diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index caffebf094e..3c9594bca38 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -362,6 +362,18 @@ async def test_get_url_external(hass: HomeAssistant) -> None: with pytest.raises(NoURLAvailableError): _get_external_url(hass, require_current_request=True, require_ssl=True) + with pytest.raises(NoURLAvailableError): + _get_external_url(hass, 
require_cloud=True) + + with patch( + "homeassistant.components.cloud.async_remote_ui_url", + return_value="https://example.nabu.casa", + ): + hass.config.components.add("cloud") + assert ( + _get_external_url(hass, require_cloud=True) == "https://example.nabu.casa" + ) + async def test_get_cloud_url(hass: HomeAssistant) -> None: """Test getting an instance URL when the user has set an external URL.""" diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 9d8170f9953..3d662e772e8 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -1311,6 +1311,184 @@ async def test_wait_timeout( assert_action_trace(expected_trace) +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + 
"variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_trigger_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait trigger that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = { + "wait_for_trigger": { + "platform": "state", + "entity_id": "switch.test", + "to": "off", + } + } + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + + variable_wait = {"wait": {"trigger": None, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, 
event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "on") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": False, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + +@pytest.mark.parametrize( + "timeout_param", [0, "{{ 0 }}", {"minutes": 0}, {"minutes": "{{ 0 }}"}] +) +async def test_wait_template_matches_with_zero_timeout( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, timeout_param: int | str +) -> None: + """Test the wait template that matches with zero timeout option.""" + event = "test_event" + events = async_capture_events(hass, event) + action = {"wait_template": "{{ states.switch.test.state == 'off' }}"} + action["timeout"] = timeout_param + action["continue_on_timeout"] = True + sequence = cv.SCRIPT_SCHEMA([action, {"event": event}]) + sequence = await script.async_validate_actions_config(hass, sequence) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + wait_started_flag = async_watch_for_action(script_obj, "wait") + hass.states.async_set("switch.test", "off") + hass.async_create_task(script_obj.async_run(context=Context())) + + try: + await 
asyncio.wait_for(wait_started_flag.wait(), 1) + except (AssertionError, TimeoutError): + await script_obj.async_stop() + raise + + assert not script_obj.is_running + assert len(events) == 1 + assert "(timeout: 0:00:00)" in caplog.text + variable_wait = {"wait": {"completed": True, "remaining": 0.0}} + expected_trace = { + "0": [ + { + "result": variable_wait, + "variables": variable_wait, + } + ], + "1": [{"result": {"event": "test_event", "event_data": {}}}], + } + assert_action_trace(expected_trace) + + @pytest.mark.parametrize( ("continue_on_timeout", "n_events"), [(False, 0), (True, 1), (None, 1)] ) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 524b8f47dfe..ae9dcbe50d5 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -707,7 +707,7 @@ def test_multiply(hass: HomeAssistant) -> None: for inp, out in tests.items(): assert ( template.Template( - "{{ %s | multiply(10) | round }}" % inp, hass + f"{{{{ {inp} | multiply(10) | round }}}}", hass ).async_render() == out ) @@ -775,7 +775,9 @@ def test_sine(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | sin | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | sin | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ sin({value}) | round(3) }}}}") == expected @@ -805,7 +807,9 @@ def test_cos(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | cos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | cos | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ cos({value}) | round(3) }}}}") == expected @@ -835,7 +839,9 @@ def test_tan(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | tan | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | tan | round(3) }}}}", hass 
+ ).async_render() == expected ) assert render(hass, f"{{{{ tan({value}) | round(3) }}}}") == expected @@ -865,7 +871,9 @@ def test_sqrt(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | sqrt | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | sqrt | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ sqrt({value}) | round(3) }}}}") == expected @@ -895,7 +903,9 @@ def test_arc_sine(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | asin | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | asin | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ asin({value}) | round(3) }}}}") == expected @@ -909,7 +919,9 @@ def test_arc_sine(hass: HomeAssistant) -> None: for value in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | asin | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | asin | round(3) }}}}", hass + ).async_render() with pytest.raises(TemplateError): assert render(hass, f"{{{{ asin({value}) | round(3) }}}}") @@ -932,7 +944,9 @@ def test_arc_cos(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | acos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | acos | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ acos({value}) | round(3) }}}}") == expected @@ -946,7 +960,9 @@ def test_arc_cos(hass: HomeAssistant) -> None: for value in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | acos | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | acos | round(3) }}}}", hass + ).async_render() with pytest.raises(TemplateError): assert render(hass, f"{{{{ acos({value}) | round(3) }}}}") @@ -973,7 +989,9 @@ def test_arc_tan(hass: 
HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | atan | round(3) }}" % value, hass).async_render() + template.Template( + f"{{{{ {value} | atan | round(3) }}}}", hass + ).async_render() == expected ) assert render(hass, f"{{{{ atan({value}) | round(3) }}}}") == expected @@ -1122,7 +1140,7 @@ def test_timestamp_local(hass: HomeAssistant) -> None: for inp, out in tests: assert ( - template.Template("{{ %s | timestamp_local }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_local }}}}", hass).async_render() == out ) @@ -1133,7 +1151,7 @@ def test_timestamp_local(hass: HomeAssistant) -> None: for inp in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | timestamp_local }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_local }}}}", hass).async_render() # Test handling of default return value assert render(hass, "{{ None | timestamp_local(1) }}") == 1 @@ -1198,6 +1216,35 @@ def test_as_datetime_from_timestamp( ) +@pytest.mark.parametrize( + ("input", "output"), + [ + ( + "{% set dt = as_datetime('2024-01-01 16:00:00-08:00') %}", + "2024-01-01 16:00:00-08:00", + ), + ( + "{% set dt = as_datetime('2024-01-29').date() %}", + "2024-01-29 00:00:00", + ), + ], +) +def test_as_datetime_from_datetime( + hass: HomeAssistant, input: str, output: str +) -> None: + """Test using datetime.datetime or datetime.date objects as input.""" + + assert ( + template.Template(f"{input}{{{{ dt | as_datetime }}}}", hass).async_render() + == output + ) + + assert ( + template.Template(f"{input}{{{{ as_datetime(dt) }}}}", hass).async_render() + == output + ) + + @pytest.mark.parametrize( ("input", "default", "output"), [ @@ -1587,7 +1634,7 @@ def test_ordinal(hass: HomeAssistant) -> None: for value, expected in tests: assert ( - template.Template("{{ %s | ordinal }}" % value, hass).async_render() + template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() == 
expected ) @@ -1602,7 +1649,7 @@ def test_timestamp_utc(hass: HomeAssistant) -> None: for inp, out in tests: assert ( - template.Template("{{ %s | timestamp_utc }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_utc }}}}", hass).async_render() == out ) @@ -1613,7 +1660,7 @@ def test_timestamp_utc(hass: HomeAssistant) -> None: for inp in invalid_tests: with pytest.raises(TemplateError): - template.Template("{{ %s | timestamp_utc }}" % inp, hass).async_render() + template.Template(f"{{{{ {inp} | timestamp_utc }}}}", hass).async_render() # Test handling of default return value assert render(hass, "{{ None | timestamp_utc(1) }}") == 1 @@ -2220,7 +2267,6 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: hass, ).async_render() assert result == "1 hour" - result = template.Template( ( "{{" @@ -2275,10 +2321,369 @@ def test_relative_time(mock_is_safe, hass: HomeAssistant) -> None: ).async_render() assert result == "string" + # Test behavior when current time is same as the input time + result = template.Template( + ( + "{{" + " relative_time(" + " strptime(" + ' "2000-01-01 10:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "0 seconds" + + # Test behavior when the input time is in the future + result = template.Template( + ( + "{{" + " relative_time(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2000-01-01 11:00:00+00:00" + info = template.Template(relative_time_template, hass).async_render_to_info() assert info.has_time is True +@patch( + "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", + return_value=True, +) +def test_time_since(mock_is_safe, hass: HomeAssistant) -> None: + """Test time_since method.""" + hass.config.set_time_zone("UTC") + now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + 
time_since_template = ( + '{{time_since(strptime("2000-01-01 09:00:00", "%Y-%m-%d %H:%M:%S"))}}' + ) + with freeze_time(now): + result = template.Template( + time_since_template, + hass, + ).async_render() + assert result == "1 hour" + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 09:00:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 03:00:00 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour" + + result1 = str( + template.strptime("2000-01-01 11:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 2" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 09:05:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=2" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 55 minutes" + + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 3" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + result = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "1999-02-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 0" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "11 months 4 days 1 hour 54 minutes 33 seconds" + result = 
template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "1999-02-01 02:05:27 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "11 months" + result1 = str( + template.strptime("2000-01-01 11:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_since(" + " strptime(" + ' "2000-01-01 11:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=3" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + '{{time_since("string")}}', + hass, + ).async_render() + assert result == "string" + + info = template.Template(time_since_template, hass).async_render_to_info() + assert info.has_time is True + + +@patch( + "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", + return_value=True, +) +def test_time_until(mock_is_safe, hass: HomeAssistant) -> None: + """Test time_until method.""" + hass.config.set_time_zone("UTC") + now = datetime.strptime("2000-01-01 10:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + time_until_template = ( + '{{time_until(strptime("2000-01-01 11:00:00", "%Y-%m-%d %H:%M:%S"))}}' + ) + with freeze_time(now): + result = template.Template( + time_until_template, + hass, + ).async_render() + assert result == "1 hour" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 13:00:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:00:00 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"' + " )" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour" + + result1 = str( + template.strptime("2000-01-01 09:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 09:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " 
precision = 2" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 12:05:00 +01:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=2" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 5 minutes" + + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 3" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z")' + " )" + "}}" + ), + hass, + ).async_render() + assert result == "2 hours" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2001-02-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 0" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 year 1 month 2 days 1 hour 54 minutes 33 seconds" + result = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2001-02-01 05:54:33 -06:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision = 4" + " )" + "}}" + ), + hass, + ).async_render() + assert result == "1 year 1 month 2 days 2 hours" + result1 = str( + template.strptime("2000-01-01 09:00:00 +00:00", "%Y-%m-%d %H:%M:%S %z") + ) + result2 = template.Template( + ( + "{{" + " time_until(" + " strptime(" + ' "2000-01-01 09:00:00 +00:00",' + ' "%Y-%m-%d %H:%M:%S %z"),' + " precision=3" + " )" + "}}" + ), + hass, + ).async_render() + assert result1 == result2 + + result = template.Template( + '{{time_until("string")}}', + hass, + ).async_render() + assert result == "string" + + info = template.Template(time_until_template, hass).async_render_to_info() + assert info.has_time is True + + @patch( "homeassistant.helpers.template.TemplateEnvironment.is_safe_callable", return_value=True, @@ -4231,7 +4636,9 
@@ def test_closest_function_invalid_state(hass: HomeAssistant) -> None: for state in ("states.zone.non_existing", '"zone.non_existing"'): assert ( - template.Template("{{ closest(%s, states) }}" % state, hass).async_render() + template.Template( + f"{{{{ closest({state}, states) }}}}", hass + ).async_render() is None ) @@ -5728,3 +6135,20 @@ async def test_label_areas( info = render_to_info(hass, f"{{{{ '{label.name}' | label_areas }}}}") assert_result_info(info, [master_bedroom.id]) assert info.rate_limit is None + + +async def test_template_thread_safety_checks(hass: HomeAssistant) -> None: + """Test template thread safety checks.""" + hass.states.async_set("sensor.test", "23") + template_str = "{{ states('sensor.test') }}" + template_obj = template.Template(template_str, None) + template_obj.hass = hass + hass.config.debug = True + + with pytest.raises( + RuntimeError, + match="Detected code that calls async_render_to_info from a thread.", + ): + await hass.async_add_executor_job(template_obj.async_render_to_info) + + assert template_obj.async_render_to_info().result() == 23 diff --git a/tests/script/__init__.py b/tests/script/__init__.py new file mode 100644 index 00000000000..209299782c9 --- /dev/null +++ b/tests/script/__init__.py @@ -0,0 +1 @@ +"""Tests for scripts.""" diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py new file mode 100644 index 00000000000..793b3de63c5 --- /dev/null +++ b/tests/script/test_gen_requirements_all.py @@ -0,0 +1,25 @@ +"""Tests for the gen_requirements_all script.""" + +from script import gen_requirements_all + + +def test_overrides_normalized() -> None: + """Test override lists are using normalized package names.""" + for req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL: + assert req == gen_requirements_all._normalize_package_name(req) + for req in gen_requirements_all.INCLUDED_REQUIREMENTS_WHEELS: + assert req == gen_requirements_all._normalize_package_name(req) + for 
overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): + for req in overrides["exclude"]: + assert req == gen_requirements_all._normalize_package_name(req) + for req in overrides["include"]: + assert req == gen_requirements_all._normalize_package_name(req) + + +def test_include_overrides_subsets() -> None: + """Test packages in include override lists are present in the exclude list.""" + for req in gen_requirements_all.INCLUDED_REQUIREMENTS_WHEELS: + assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL + for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): + for req in overrides["include"]: + assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 12eb52c06f4..96caf5d10c8 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -13,7 +13,7 @@ import pytest from homeassistant import bootstrap, loader, runner import homeassistant.config as config_util from homeassistant.config_entries import HANDLERS, ConfigEntry -from homeassistant.const import SIGNAL_BOOTSTRAP_INTEGRATIONS +from homeassistant.const import CONF_DEBUG, SIGNAL_BOOTSTRAP_INTEGRATIONS from homeassistant.core import CoreState, HomeAssistant, async_get_hass, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -44,7 +44,7 @@ async def apply_stop_hass(stop_hass: None) -> None: """Make sure all hass are stopped.""" -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="module", autouse=True) def mock_http_start_stop() -> Generator[None, None, None]: """Mock HTTP start and stop.""" with ( @@ -112,6 +112,48 @@ async def test_empty_setup(hass: HomeAssistant) -> None: assert domain in hass.config.components, domain +@pytest.mark.parametrize("load_registries", [False]) +async def test_config_does_not_turn_off_debug(hass: HomeAssistant) -> None: + """Test that config does 
not turn off debug if its turned on by runtime config.""" + # Mock that its turned on from RuntimeConfig + hass.config.debug = True + + await bootstrap.async_from_config_dict({CONF_DEBUG: False}, hass) + assert hass.config.debug is True + + +@pytest.mark.parametrize("hass_config", [{"frontend": {}}]) +async def test_asyncio_debug_on_turns_hass_debug_on( + mock_hass_config: None, + mock_enable_logging: Mock, + mock_is_virtual_env: Mock, + mock_mount_local_lib_path: AsyncMock, + mock_ensure_config_exists: AsyncMock, + mock_process_ha_config_upgrade: Mock, +) -> None: + """Test that asyncio debug turns on hass debug.""" + asyncio.get_running_loop().set_debug(True) + + verbose = Mock() + log_rotate_days = Mock() + log_file = Mock() + log_no_color = Mock() + + hass = await bootstrap.async_setup_hass( + runner.RuntimeConfig( + config_dir=get_test_config_dir(), + verbose=verbose, + log_rotate_days=log_rotate_days, + log_file=log_file, + log_no_color=log_no_color, + skip_pip=True, + recovery_mode=False, + ), + ) + + assert hass.config.debug is True + + @pytest.mark.parametrize("load_registries", [False]) async def test_preload_translations(hass: HomeAssistant) -> None: """Test translations are preloaded for all frontend deps and base platforms.""" @@ -599,6 +641,7 @@ async def test_setup_hass( log_no_color=log_no_color, skip_pip=True, recovery_mode=False, + debug=True, ), ) @@ -619,6 +662,9 @@ async def test_setup_hass( assert len(mock_ensure_config_exists.mock_calls) == 1 assert len(mock_process_ha_config_upgrade.mock_calls) == 1 + # debug in RuntimeConfig should set it it in hass.config + assert hass.config.debug is True + assert hass == async_get_hass() diff --git a/tests/test_config.py b/tests/test_config.py index defd6a1018b..58529fb0057 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -857,6 +857,7 @@ async def test_loading_configuration(hass: HomeAssistant) -> None: "internal_url": "http://example.local", "media_dirs": {"mymedia": "/usr"}, 
"legacy_templates": True, + "debug": True, "currency": "EUR", "country": "SE", "language": "sv", @@ -877,6 +878,7 @@ async def test_loading_configuration(hass: HomeAssistant) -> None: assert hass.config.media_dirs == {"mymedia": "/usr"} assert hass.config.config_source is ConfigSource.YAML assert hass.config.legacy_templates is True + assert hass.config.debug is True assert hass.config.currency == "EUR" assert hass.config.country == "SE" assert hass.config.language == "sv" diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 63dea5ea735..68f770631ed 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4504,24 +4504,86 @@ def test_raise_trying_to_add_same_config_entry_twice( assert f"An entry with the id {entry.entry_id} already exists" in caplog.text +@pytest.mark.parametrize( + ( + "title", + "unique_id", + "data_vendor", + "options_vendor", + "kwargs", + "calls_entry_load_unload", + ), + [ + ( + ("Test", "Updated title"), + ("1234", "5678"), + ("data", "data2"), + ("options", "options2"), + {}, + (2, 1), + ), + ( + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {}, + (2, 1), + ), + ( + ("Test", "Updated title"), + ("1234", "5678"), + ("data", "data2"), + ("options", "options2"), + {"reload_even_if_entry_is_unchanged": True}, + (2, 1), + ), + ( + ("Test", "Test"), + ("1234", "1234"), + ("data", "data"), + ("options", "options"), + {"reload_even_if_entry_is_unchanged": False}, + (1, 0), + ), + ], + ids=[ + "changed_entry_default", + "unchanged_entry_default", + "changed_entry_explicit_reload", + "changed_entry_no_reload", + ], +) async def test_update_entry_and_reload( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + title: tuple[str, str], + unique_id: tuple[str, str], + data_vendor: tuple[str, str], + options_vendor: tuple[str, str], + kwargs: dict[str, Any], + calls_entry_load_unload: 
tuple[int, int], ) -> None: """Test updating an entry and reloading.""" entry = MockConfigEntry( domain="comp", - unique_id="1234", - title="Test", - data={"vendor": "data"}, - options={"vendor": "options"}, + unique_id=unique_id[0], + title=title[0], + data={"vendor": data_vendor[0]}, + options={"vendor": options_vendor[0]}, ) entry.add_to_hass(hass) - mock_integration( - hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)) + comp = MockModule( + "comp", + async_setup_entry=AsyncMock(return_value=True), + async_unload_entry=AsyncMock(return_value=True), ) + mock_integration(hass, comp) mock_platform(hass, "comp.config_flow", None) + await hass.config_entries.async_setup(entry.entry_id) + class MockFlowHandler(config_entries.ConfigFlow): """Define a mock flow handler.""" @@ -4531,23 +4593,27 @@ async def test_update_entry_and_reload( """Mock Reauth.""" return self.async_update_reload_and_abort( entry=entry, - unique_id="5678", - title="Updated Title", - data={"vendor": "data2"}, - options={"vendor": "options2"}, + unique_id=unique_id[1], + title=title[1], + data={"vendor": data_vendor[1]}, + options={"vendor": options_vendor[1]}, + **kwargs, ) with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}): task = await manager.flow.async_init("comp", context={"source": "reauth"}) await hass.async_block_till_done() - assert entry.title == "Updated Title" - assert entry.unique_id == "5678" - assert entry.data == {"vendor": "data2"} - assert entry.options == {"vendor": "options2"} + assert entry.title == title[1] + assert entry.unique_id == unique_id[1] + assert entry.data == {"vendor": data_vendor[1]} + assert entry.options == {"vendor": options_vendor[1]} assert entry.state == config_entries.ConfigEntryState.LOADED assert task["type"] == FlowResultType.ABORT assert task["reason"] == "reauth_successful" + # Assert entry was reloaded + assert len(comp.async_setup_entry.mock_calls) == calls_entry_load_unload[0] + assert 
len(comp.async_unload_entry.mock_calls) == calls_entry_load_unload[1] @pytest.mark.parametrize("unique_id", [["blah", "bleh"], {"key": "value"}]) diff --git a/tests/test_core.py b/tests/test_core.py index 5d687d89833..a553d5bbbed 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -42,6 +42,7 @@ from homeassistant.core import ( CoreState, HassJob, HomeAssistant, + ReleaseChannel, ServiceCall, ServiceResponse, State, @@ -1147,11 +1148,11 @@ async def test_eventbus_filtered_listener(hass: HomeAssistant) -> None: calls.append(event) @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return not event_data["filtered"] - unsub = hass.bus.async_listen("test", listener, event_filter=filter) + unsub = hass.bus.async_listen("test", listener, event_filter=mock_filter) hass.bus.async_fire("test", {"filtered": True}) await hass.async_block_till_done() @@ -1802,9 +1803,9 @@ async def test_services_call_return_response_requires_blocking( blocking=False, return_response=True, ) - assert ( - str(exc.value) - == "A non blocking service call with argument blocking=False can't be used together with argument return_response=True" + assert str(exc.value) == ( + "A non blocking service call with argument blocking=False " + "can't be used together with argument return_response=True" ) @@ -1989,6 +1990,7 @@ async def test_config_as_dict() -> None: "country": None, "language": "en", "safe_mode": False, + "debug": False, } assert expected == config.as_dict() @@ -3060,13 +3062,15 @@ async def test_validate_state(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("version", "release_channel"), [ - ("0.115.0.dev20200815", "nightly"), - ("0.115.0", "stable"), - ("0.115.0b4", "beta"), - ("0.115.0dev0", "dev"), + ("0.115.0.dev20200815", ReleaseChannel.NIGHTLY), + ("0.115.0", ReleaseChannel.STABLE), + ("0.115.0b4", ReleaseChannel.BETA), + ("0.115.0dev0", ReleaseChannel.DEV), ], ) -async def test_get_release_channel(version: str, release_channel: 
str) -> None: +async def test_get_release_channel( + version: str, release_channel: ReleaseChannel +) -> None: """Test if release channel detection works from Home Assistant version number.""" with patch("homeassistant.core.__version__", f"{version}"): assert get_release_channel() == release_channel @@ -3271,11 +3275,11 @@ async def test_eventbus_lazy_object_creation(hass: HomeAssistant) -> None: calls.append(event) @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return not event_data["filtered"] - unsub = hass.bus.async_listen("test_1", listener, event_filter=filter) + unsub = hass.bus.async_listen("test_1", listener, event_filter=mock_filter) # Test lazy creation of Event objects with patch("homeassistant.core.Event") as mock_event: @@ -3340,7 +3344,7 @@ async def test_statemachine_report_state(hass: HomeAssistant) -> None: """Test report state event.""" @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return True @@ -3351,7 +3355,7 @@ async def test_statemachine_report_state(hass: HomeAssistant) -> None: hass.states.async_set("light.bowl", "on", {}) state_changed_events = async_capture_events(hass, EVENT_STATE_CHANGED) state_reported_events = [] - hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=filter) + hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=mock_filter) hass.states.async_set("light.bowl", "on") await hass.async_block_till_done() @@ -3382,7 +3386,7 @@ async def test_report_state_listener_restrictions(hass: HomeAssistant) -> None: """Mock listener.""" @ha.callback - def filter(event_data): + def mock_filter(event_data): """Mock filter.""" return False @@ -3391,7 +3395,7 @@ async def test_report_state_listener_restrictions(hass: HomeAssistant) -> None: hass.bus.async_listen(EVENT_STATE_REPORTED, listener) # Both filter and run_immediately - hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=filter) + 
hass.bus.async_listen(EVENT_STATE_REPORTED, listener, event_filter=mock_filter) @pytest.mark.parametrize( @@ -3436,3 +3440,43 @@ async def test_top_level_components(hass: HomeAssistant) -> None: hass.config.components.remove("homeassistant.scene") with pytest.raises(NotImplementedError): hass.config.components.discard("homeassistant") + + +async def test_debug_mode_defaults_to_off(hass: HomeAssistant) -> None: + """Test debug mode defaults to off.""" + assert not hass.config.debug + + +async def test_async_fire_thread_safety(hass: HomeAssistant) -> None: + """Test async_fire thread safety.""" + events = async_capture_events(hass, "test_event") + hass.bus.async_fire("test_event") + with pytest.raises( + RuntimeError, match="Detected code that calls async_fire from a thread." + ): + await hass.async_add_executor_job(hass.bus.async_fire, "test_event") + + assert len(events) == 1 + + +async def test_async_register_thread_safety(hass: HomeAssistant) -> None: + """Test async_register thread safety.""" + with pytest.raises( + RuntimeError, match="Detected code that calls async_register from a thread." + ): + await hass.async_add_executor_job( + hass.services.async_register, + "test_domain", + "test_service", + lambda call: None, + ) + + +async def test_async_remove_thread_safety(hass: HomeAssistant) -> None: + """Test async_remove thread safety.""" + with pytest.raises( + RuntimeError, match="Detected code that calls async_remove from a thread." 
+ ): + await hass.async_add_executor_job( + hass.services.async_remove, "test_domain", "test_service" + ) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 5e113d3ba10..9d556b55b15 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -102,7 +102,7 @@ def test_template_message(arg: str | Exception, expected: str) -> None: ) async def test_home_assistant_error( hass: HomeAssistant, - exception_args: tuple[Any,], + exception_args: tuple[Any, ...], exception_kwargs: dict[str, Any], args_base_class: tuple[Any], message: str, diff --git a/tests/util/test_async.py b/tests/util/test_async.py index 157becc4b01..ac927b1375a 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -76,7 +76,8 @@ async def test_run_callback_threadsafe(hass: HomeAssistant) -> None: nonlocal it_ran it_ran = True - assert hasync.run_callback_threadsafe(hass.loop, callback) + with patch.dict(hass.loop.__dict__, {"_thread_ident": -1}): + assert hasync.run_callback_threadsafe(hass.loop, callback) assert it_ran is False # Verify that async_block_till_done will flush @@ -95,6 +96,7 @@ async def test_callback_is_always_scheduled(hass: HomeAssistant) -> None: hasync.shutdown_run_callback_threadsafe(hass.loop) with ( + patch.dict(hass.loop.__dict__, {"_thread_ident": -1}), patch.object(hass.loop, "call_soon_threadsafe") as mock_call_soon_threadsafe, pytest.raises(RuntimeError), ): diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 7ed8154f033..215524c426b 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -178,12 +178,18 @@ def test_get_age() -> None: """Test get_age.""" diff = dt_util.now() - timedelta(seconds=0) assert dt_util.get_age(diff) == "0 seconds" + assert dt_util.get_age(diff, precision=2) == "0 seconds" diff = dt_util.now() - timedelta(seconds=1) assert dt_util.get_age(diff) == "1 second" + assert dt_util.get_age(diff, precision=2) == "1 second" + + diff = dt_util.now() + timedelta(seconds=1) + 
pytest.raises(ValueError, dt_util.get_age, diff) diff = dt_util.now() - timedelta(seconds=30) assert dt_util.get_age(diff) == "30 seconds" + diff = dt_util.now() + timedelta(seconds=30) diff = dt_util.now() - timedelta(minutes=5) assert dt_util.get_age(diff) == "5 minutes" @@ -196,20 +202,81 @@ def test_get_age() -> None: diff = dt_util.now() - timedelta(minutes=320) assert dt_util.get_age(diff) == "5 hours" + assert dt_util.get_age(diff, precision=2) == "5 hours 20 minutes" + assert dt_util.get_age(diff, precision=3) == "5 hours 20 minutes" diff = dt_util.now() - timedelta(minutes=1.6 * 60 * 24) assert dt_util.get_age(diff) == "2 days" + assert dt_util.get_age(diff, precision=2) == "1 day 14 hours" + assert dt_util.get_age(diff, precision=3) == "1 day 14 hours 24 minutes" + diff = dt_util.now() + timedelta(minutes=1.6 * 60 * 24) + pytest.raises(ValueError, dt_util.get_age, diff) diff = dt_util.now() - timedelta(minutes=2 * 60 * 24) assert dt_util.get_age(diff) == "2 days" diff = dt_util.now() - timedelta(minutes=32 * 60 * 24) assert dt_util.get_age(diff) == "1 month" + assert dt_util.get_age(diff, precision=10) == "1 month 2 days" + + diff = dt_util.now() - timedelta(minutes=32 * 60 * 24 + 1) + assert dt_util.get_age(diff, precision=3) == "1 month 2 days 1 minute" diff = dt_util.now() - timedelta(minutes=365 * 60 * 24) assert dt_util.get_age(diff) == "1 year" +def test_time_remaining() -> None: + """Test get_age.""" + diff = dt_util.now() + timedelta(seconds=0) + assert dt_util.get_time_remaining(diff) == "0 seconds" + assert dt_util.get_time_remaining(diff) == "0 seconds" + assert dt_util.get_time_remaining(diff, precision=2) == "0 seconds" + + diff = dt_util.now() + timedelta(seconds=1) + assert dt_util.get_time_remaining(diff) == "1 second" + + diff = dt_util.now() - timedelta(seconds=1) + pytest.raises(ValueError, dt_util.get_time_remaining, diff) + + diff = dt_util.now() + timedelta(seconds=30) + assert dt_util.get_time_remaining(diff) == "30 seconds" + + 
diff = dt_util.now() + timedelta(minutes=5) + assert dt_util.get_time_remaining(diff) == "5 minutes" + + diff = dt_util.now() + timedelta(minutes=1) + assert dt_util.get_time_remaining(diff) == "1 minute" + + diff = dt_util.now() + timedelta(minutes=300) + assert dt_util.get_time_remaining(diff) == "5 hours" + + diff = dt_util.now() + timedelta(minutes=320) + assert dt_util.get_time_remaining(diff) == "5 hours" + assert dt_util.get_time_remaining(diff, precision=2) == "5 hours 20 minutes" + assert dt_util.get_time_remaining(diff, precision=3) == "5 hours 20 minutes" + + diff = dt_util.now() + timedelta(minutes=1.6 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "2 days" + assert dt_util.get_time_remaining(diff, precision=2) == "1 day 14 hours" + assert dt_util.get_time_remaining(diff, precision=3) == "1 day 14 hours 24 minutes" + diff = dt_util.now() - timedelta(minutes=1.6 * 60 * 24) + pytest.raises(ValueError, dt_util.get_time_remaining, diff) + + diff = dt_util.now() + timedelta(minutes=2 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "2 days" + + diff = dt_util.now() + timedelta(minutes=32 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "1 month" + assert dt_util.get_time_remaining(diff, precision=10) == "1 month 2 days" + + diff = dt_util.now() + timedelta(minutes=32 * 60 * 24 + 1) + assert dt_util.get_time_remaining(diff, precision=3) == "1 month 2 days 1 minute" + + diff = dt_util.now() + timedelta(minutes=365 * 60 * 24) + assert dt_util.get_time_remaining(diff) == "1 year" + + def test_parse_time_expression() -> None: """Test parse_time_expression.""" assert list(range(60)) == dt_util.parse_time_expression("*", 0, 59) diff --git a/tests/util/test_percentage.py b/tests/util/test_percentage.py index 2fc054fb4f1..3af42310e94 100644 --- a/tests/util/test_percentage.py +++ b/tests/util/test_percentage.py @@ -104,77 +104,77 @@ async def test_percentage_to_ordered_list_item() -> None: async def test_ranged_value_to_percentage_large() -> 
None: """Test a large range of low and high values convert a single value to a percentage.""" - range = (1, 255) + value_range = (1, 255) - assert ranged_value_to_percentage(range, 255) == 100 - assert ranged_value_to_percentage(range, 127) == 49 - assert ranged_value_to_percentage(range, 10) == 3 - assert ranged_value_to_percentage(range, 1) == 0 + assert ranged_value_to_percentage(value_range, 255) == 100 + assert ranged_value_to_percentage(value_range, 127) == 49 + assert ranged_value_to_percentage(value_range, 10) == 3 + assert ranged_value_to_percentage(value_range, 1) == 0 async def test_percentage_to_ranged_value_large() -> None: """Test a large range of low and high values convert a percentage to a single value.""" - range = (1, 255) + value_range = (1, 255) - assert percentage_to_ranged_value(range, 100) == 255 - assert percentage_to_ranged_value(range, 50) == 127.5 - assert percentage_to_ranged_value(range, 4) == 10.2 + assert percentage_to_ranged_value(value_range, 100) == 255 + assert percentage_to_ranged_value(value_range, 50) == 127.5 + assert percentage_to_ranged_value(value_range, 4) == 10.2 - assert math.ceil(percentage_to_ranged_value(range, 100)) == 255 - assert math.ceil(percentage_to_ranged_value(range, 50)) == 128 - assert math.ceil(percentage_to_ranged_value(range, 4)) == 11 + assert math.ceil(percentage_to_ranged_value(value_range, 100)) == 255 + assert math.ceil(percentage_to_ranged_value(value_range, 50)) == 128 + assert math.ceil(percentage_to_ranged_value(value_range, 4)) == 11 async def test_ranged_value_to_percentage_small() -> None: """Test a small range of low and high values convert a single value to a percentage.""" - range = (1, 6) + value_range = (1, 6) - assert ranged_value_to_percentage(range, 1) == 16 - assert ranged_value_to_percentage(range, 2) == 33 - assert ranged_value_to_percentage(range, 3) == 50 - assert ranged_value_to_percentage(range, 4) == 66 - assert ranged_value_to_percentage(range, 5) == 83 - assert 
ranged_value_to_percentage(range, 6) == 100 + assert ranged_value_to_percentage(value_range, 1) == 16 + assert ranged_value_to_percentage(value_range, 2) == 33 + assert ranged_value_to_percentage(value_range, 3) == 50 + assert ranged_value_to_percentage(value_range, 4) == 66 + assert ranged_value_to_percentage(value_range, 5) == 83 + assert ranged_value_to_percentage(value_range, 6) == 100 async def test_percentage_to_ranged_value_small() -> None: """Test a small range of low and high values convert a percentage to a single value.""" - range = (1, 6) + value_range = (1, 6) - assert math.ceil(percentage_to_ranged_value(range, 16)) == 1 - assert math.ceil(percentage_to_ranged_value(range, 33)) == 2 - assert math.ceil(percentage_to_ranged_value(range, 50)) == 3 - assert math.ceil(percentage_to_ranged_value(range, 66)) == 4 - assert math.ceil(percentage_to_ranged_value(range, 83)) == 5 - assert math.ceil(percentage_to_ranged_value(range, 100)) == 6 + assert math.ceil(percentage_to_ranged_value(value_range, 16)) == 1 + assert math.ceil(percentage_to_ranged_value(value_range, 33)) == 2 + assert math.ceil(percentage_to_ranged_value(value_range, 50)) == 3 + assert math.ceil(percentage_to_ranged_value(value_range, 66)) == 4 + assert math.ceil(percentage_to_ranged_value(value_range, 83)) == 5 + assert math.ceil(percentage_to_ranged_value(value_range, 100)) == 6 async def test_ranged_value_to_percentage_starting_at_one() -> None: """Test a range that starts with 1.""" - range = (1, 4) + value_range = (1, 4) - assert ranged_value_to_percentage(range, 1) == 25 - assert ranged_value_to_percentage(range, 2) == 50 - assert ranged_value_to_percentage(range, 3) == 75 - assert ranged_value_to_percentage(range, 4) == 100 + assert ranged_value_to_percentage(value_range, 1) == 25 + assert ranged_value_to_percentage(value_range, 2) == 50 + assert ranged_value_to_percentage(value_range, 3) == 75 + assert ranged_value_to_percentage(value_range, 4) == 100 async def 
test_ranged_value_to_percentage_starting_high() -> None: """Test a range that does not start with 1.""" - range = (101, 255) + value_range = (101, 255) - assert ranged_value_to_percentage(range, 101) == 0 - assert ranged_value_to_percentage(range, 139) == 25 - assert ranged_value_to_percentage(range, 178) == 50 - assert ranged_value_to_percentage(range, 217) == 75 - assert ranged_value_to_percentage(range, 255) == 100 + assert ranged_value_to_percentage(value_range, 101) == 0 + assert ranged_value_to_percentage(value_range, 139) == 25 + assert ranged_value_to_percentage(value_range, 178) == 50 + assert ranged_value_to_percentage(value_range, 217) == 75 + assert ranged_value_to_percentage(value_range, 255) == 100 async def test_ranged_value_to_percentage_starting_zero() -> None: """Test a range that starts with 0.""" - range = (0, 3) + value_range = (0, 3) - assert ranged_value_to_percentage(range, 0) == 25 - assert ranged_value_to_percentage(range, 1) == 50 - assert ranged_value_to_percentage(range, 2) == 75 - assert ranged_value_to_percentage(range, 3) == 100 + assert ranged_value_to_percentage(value_range, 0) == 25 + assert ranged_value_to_percentage(value_range, 1) == 50 + assert ranged_value_to_percentage(value_range, 2) == 75 + assert ranged_value_to_percentage(value_range, 3) == 100 diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 113a348c1d1..f17489e1488 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -568,13 +568,13 @@ def test_no_recursive_secrets( def test_input_class() -> None: """Test input class.""" - input = yaml_loader.Input("hello") - input2 = yaml_loader.Input("hello") + yaml_input = yaml_loader.Input("hello") + yaml_input2 = yaml_loader.Input("hello") - assert input.name == "hello" - assert input == input2 + assert yaml_input.name == "hello" + assert yaml_input == yaml_input2 - assert len({input, input2}) == 1 + assert len({yaml_input, yaml_input2}) == 1 def test_input(try_both_loaders, 
try_both_dumpers) -> None: