mirror of
https://github.com/home-assistant/core.git
synced 2025-08-09 23:55:07 +02:00
Merge branch 'home-assistant:dev' into pglab
This commit is contained in:
11
.coveragerc
11
.coveragerc
@@ -361,6 +361,8 @@ omit =
|
||||
homeassistant/components/environment_canada/weather.py
|
||||
homeassistant/components/envisalink/*
|
||||
homeassistant/components/ephember/climate.py
|
||||
homeassistant/components/epic_games_store/__init__.py
|
||||
homeassistant/components/epic_games_store/coordinator.py
|
||||
homeassistant/components/epion/__init__.py
|
||||
homeassistant/components/epion/coordinator.py
|
||||
homeassistant/components/epion/sensor.py
|
||||
@@ -739,6 +741,7 @@ omit =
|
||||
homeassistant/components/lutron/binary_sensor.py
|
||||
homeassistant/components/lutron/cover.py
|
||||
homeassistant/components/lutron/entity.py
|
||||
homeassistant/components/lutron/event.py
|
||||
homeassistant/components/lutron/fan.py
|
||||
homeassistant/components/lutron/light.py
|
||||
homeassistant/components/lutron/switch.py
|
||||
@@ -983,6 +986,7 @@ omit =
|
||||
homeassistant/components/orvibo/switch.py
|
||||
homeassistant/components/osoenergy/__init__.py
|
||||
homeassistant/components/osoenergy/const.py
|
||||
homeassistant/components/osoenergy/sensor.py
|
||||
homeassistant/components/osoenergy/water_heater.py
|
||||
homeassistant/components/osramlightify/light.py
|
||||
homeassistant/components/otp/sensor.py
|
||||
@@ -1154,8 +1158,10 @@ omit =
|
||||
homeassistant/components/roborock/coordinator.py
|
||||
homeassistant/components/rocketchat/notify.py
|
||||
homeassistant/components/romy/__init__.py
|
||||
homeassistant/components/romy/binary_sensor.py
|
||||
homeassistant/components/romy/coordinator.py
|
||||
homeassistant/components/romy/entity.py
|
||||
homeassistant/components/romy/sensor.py
|
||||
homeassistant/components/romy/vacuum.py
|
||||
homeassistant/components/roomba/__init__.py
|
||||
homeassistant/components/roomba/binary_sensor.py
|
||||
@@ -1405,11 +1411,6 @@ omit =
|
||||
homeassistant/components/tado/water_heater.py
|
||||
homeassistant/components/tami4/button.py
|
||||
homeassistant/components/tank_utility/sensor.py
|
||||
homeassistant/components/tankerkoenig/__init__.py
|
||||
homeassistant/components/tankerkoenig/binary_sensor.py
|
||||
homeassistant/components/tankerkoenig/coordinator.py
|
||||
homeassistant/components/tankerkoenig/entity.py
|
||||
homeassistant/components/tankerkoenig/sensor.py
|
||||
homeassistant/components/tapsaff/binary_sensor.py
|
||||
homeassistant/components/tautulli/__init__.py
|
||||
homeassistant/components/tautulli/coordinator.py
|
||||
|
18
.github/workflows/builder.yml
vendored
18
.github/workflows/builder.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -69,7 +69,7 @@ jobs:
|
||||
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
|
||||
|
||||
- name: Upload translations
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: translations
|
||||
path: translations.tar.gz
|
||||
@@ -90,7 +90,7 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
@@ -175,7 +175,7 @@ jobs:
|
||||
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: translations
|
||||
|
||||
@@ -242,7 +242,7 @@ jobs:
|
||||
- green
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Set build additional args
|
||||
run: |
|
||||
@@ -279,7 +279,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Initialize git
|
||||
uses: home-assistant/actions/helpers/git-init@master
|
||||
@@ -320,7 +320,7 @@ jobs:
|
||||
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@v3.4.0
|
||||
@@ -450,7 +450,7 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -458,7 +458,7 @@ jobs:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: translations
|
||||
|
||||
|
86
.github/workflows/ci.yaml
vendored
86
.github/workflows/ci.yaml
vendored
@@ -33,10 +33,10 @@ on:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
CACHE_VERSION: 7
|
||||
CACHE_VERSION: 8
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 8
|
||||
HA_SHORT_VERSION: "2024.5"
|
||||
HA_SHORT_VERSION: "2024.6"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
ALL_PYTHON_VERSIONS: "['3.12']"
|
||||
# 10.3 is the oldest supported version
|
||||
@@ -89,7 +89,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: >-
|
||||
@@ -97,7 +97,8 @@ jobs:
|
||||
hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{
|
||||
hashFiles('requirements.txt') }}-${{
|
||||
hashFiles('requirements_all.txt') }}-${{
|
||||
hashFiles('homeassistant/package_constraints.txt') }}" >> $GITHUB_OUTPUT
|
||||
hashFiles('homeassistant/package_constraints.txt') }}-${{
|
||||
hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
|
||||
- name: Generate partial pre-commit restore key
|
||||
id: generate_pre-commit_cache_key
|
||||
run: >-
|
||||
@@ -223,7 +224,7 @@ jobs:
|
||||
- info
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -269,7 +270,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -309,7 +310,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -348,7 +349,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -442,7 +443,7 @@ jobs:
|
||||
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -451,8 +452,10 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Generate partial uv restore key
|
||||
id: generate-uv-key
|
||||
run: >-
|
||||
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${{
|
||||
run: |
|
||||
uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3)
|
||||
echo "version=${uv_version}" >> $GITHUB_OUTPUT
|
||||
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
@@ -472,10 +475,13 @@ jobs:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-uv-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{ env.UV_CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
|
||||
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Install additional OS dependencies
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
@@ -497,8 +503,9 @@ jobs:
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements_test.txt)"
|
||||
uv pip install -U "pip>=21.3.1" setuptools wheel
|
||||
uv pip install -r requirements_all.txt
|
||||
uv pip install "$(grep 'python-gammu' < requirements_all.txt | sed -e 's|# python-gammu|python-gammu|g')"
|
||||
uv pip install -r requirements.txt
|
||||
python -m script.gen_requirements_all ci
|
||||
uv pip install -r requirements_all_pytest.txt
|
||||
uv pip install -r requirements_test.txt
|
||||
uv pip install -e . --config-settings editable_mode=compat
|
||||
|
||||
@@ -513,7 +520,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -545,7 +552,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -578,7 +585,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -622,7 +629,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -688,13 +695,14 @@ jobs:
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -715,7 +723,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
@@ -748,13 +756,14 @@ jobs:
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -776,7 +785,7 @@ jobs:
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||
- name: Download pytest_buckets
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: pytest_buckets
|
||||
- name: Compile English translations
|
||||
@@ -811,14 +820,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -863,13 +872,14 @@ jobs:
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libmariadb-dev-compat
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -933,7 +943,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -941,7 +951,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -985,13 +995,14 @@ jobs:
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
postgresql-server-dev-14
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -1056,7 +1067,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1064,7 +1075,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1086,9 +1097,9 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1126,13 +1137,14 @@ jobs:
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -1193,14 +1205,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -1219,9 +1231,9 @@ jobs:
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
|
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -21,14 +21,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3.25.1
|
||||
uses: github/codeql-action/init@v3.25.3
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3.25.1
|
||||
uses: github/codeql-action/analyze@v3.25.3
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
2
.github/workflows/translations.yml
vendored
2
.github/workflows/translations.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
|
78
.github/workflows/wheels.yml
vendored
78
.github/workflows/wheels.yml
vendored
@@ -14,6 +14,10 @@ on:
|
||||
- "homeassistant/package_constraints.txt"
|
||||
- "requirements_all.txt"
|
||||
- "requirements.txt"
|
||||
- "script/gen_requirements_all.py"
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name}}
|
||||
@@ -28,7 +32,22 @@ jobs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
||||
- name: Create Python virtual environment
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements_test.txt)"
|
||||
uv pip install -r requirements.txt
|
||||
|
||||
- name: Get information
|
||||
id: info
|
||||
@@ -63,19 +82,30 @@ jobs:
|
||||
) > .env_file
|
||||
|
||||
- name: Upload env_file
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: env_file
|
||||
path: ./.env_file
|
||||
overwrite: true
|
||||
|
||||
- name: Upload requirements_diff
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: requirements_diff
|
||||
path: ./requirements_diff.txt
|
||||
overwrite: true
|
||||
|
||||
- name: Generate requirements
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python -m script.gen_requirements_all ci
|
||||
|
||||
- name: Upload requirements_all_wheels
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
path: ./requirements_all_wheels_*.txt
|
||||
|
||||
core:
|
||||
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
@@ -88,15 +118,15 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
@@ -126,42 +156,22 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.4
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
- name: Download requirements_diff
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: requirements_diff
|
||||
|
||||
- name: (Un)comment packages
|
||||
run: |
|
||||
requirement_files="requirements_all.txt requirements_diff.txt"
|
||||
for requirement_file in ${requirement_files}; do
|
||||
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
|
||||
sed -i "s|# evdev|evdev|g" ${requirement_file}
|
||||
sed -i "s|# pycups|pycups|g" ${requirement_file}
|
||||
sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file}
|
||||
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}
|
||||
|
||||
# Some packages are not buildable on armhf anymore
|
||||
if [ "${{ matrix.arch }}" = "armhf" ]; then
|
||||
|
||||
# Pandas has issues building on armhf, it is expected they
|
||||
# will drop the platform in the near future (they consider it
|
||||
# "flimsy" on 386). The following packages depend on pandas,
|
||||
# so we comment them out.
|
||||
sed -i "s|env-canada|# env-canada|g" ${requirement_file}
|
||||
sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file}
|
||||
sed -i "s|pyezviz|# pyezviz|g" ${requirement_file}
|
||||
sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file}
|
||||
fi
|
||||
|
||||
done
|
||||
- name: Download requirements_all_wheels
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
|
||||
- name: Split requirements all
|
||||
run: |
|
||||
@@ -169,7 +179,7 @@ jobs:
|
||||
# This is to prevent the build from running out of memory when
|
||||
# resolving packages on 32-bits systems (like armhf, armv7).
|
||||
|
||||
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt
|
||||
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
|
||||
|
||||
- name: Create requirements for cython<3
|
||||
run: |
|
||||
|
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.3.7
|
||||
rev: v0.4.2
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
|
@@ -235,6 +235,7 @@ homeassistant.components.homeworks.*
|
||||
homeassistant.components.http.*
|
||||
homeassistant.components.huawei_lte.*
|
||||
homeassistant.components.humidifier.*
|
||||
homeassistant.components.husqvarna_automower.*
|
||||
homeassistant.components.hydrawise.*
|
||||
homeassistant.components.hyperion.*
|
||||
homeassistant.components.ibeacon.*
|
||||
|
20
CODEOWNERS
20
CODEOWNERS
@@ -127,8 +127,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/aprilaire/ @chamberlain2007
|
||||
/homeassistant/components/aprs/ @PhilRW
|
||||
/tests/components/aprs/ @PhilRW
|
||||
/homeassistant/components/aranet/ @aschmitz @thecode
|
||||
/tests/components/aranet/ @aschmitz @thecode
|
||||
/homeassistant/components/aranet/ @aschmitz @thecode @anrijs
|
||||
/tests/components/aranet/ @aschmitz @thecode @anrijs
|
||||
/homeassistant/components/arcam_fmj/ @elupus
|
||||
/tests/components/arcam_fmj/ @elupus
|
||||
/homeassistant/components/arris_tg2492lg/ @vanbalken
|
||||
@@ -398,6 +398,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/environment_canada/ @gwww @michaeldavie
|
||||
/tests/components/environment_canada/ @gwww @michaeldavie
|
||||
/homeassistant/components/ephember/ @ttroy50
|
||||
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/tests/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/homeassistant/components/epion/ @lhgravendeel
|
||||
/tests/components/epion/ @lhgravendeel
|
||||
/homeassistant/components/epson/ @pszafer
|
||||
@@ -599,6 +601,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
@@ -873,8 +877,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/motioneye/ @dermotduffy
|
||||
/homeassistant/components/motionmount/ @RJPoelstra
|
||||
/tests/components/motionmount/ @RJPoelstra
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/homeassistant/components/msteams/ @peroyvind
|
||||
/homeassistant/components/mullvad/ @meichthys
|
||||
/tests/components/mullvad/ @meichthys
|
||||
@@ -1284,8 +1288,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/snmp/ @nmaggioni
|
||||
/homeassistant/components/snooz/ @AustinBrunkhorst
|
||||
/tests/components/snooz/ @AustinBrunkhorst
|
||||
/homeassistant/components/solaredge/ @frenck
|
||||
/tests/components/solaredge/ @frenck
|
||||
/homeassistant/components/solaredge/ @frenck @bdraco
|
||||
/tests/components/solaredge/ @frenck @bdraco
|
||||
/homeassistant/components/solaredge_local/ @drobtravels @scheric
|
||||
/homeassistant/components/solarlog/ @Ernst79
|
||||
/tests/components/solarlog/ @Ernst79
|
||||
@@ -1582,8 +1586,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/wiz/ @sbidy
|
||||
/homeassistant/components/wled/ @frenck
|
||||
/tests/components/wled/ @frenck
|
||||
/homeassistant/components/wolflink/ @adamkrol93
|
||||
/tests/components/wolflink/ @adamkrol93
|
||||
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
|
||||
/tests/components/wolflink/ @adamkrol93 @mtielen
|
||||
/homeassistant/components/workday/ @fabaff @gjohansson-ST
|
||||
/tests/components/workday/ @fabaff @gjohansson-ST
|
||||
/homeassistant/components/worldclock/ @fabaff
|
||||
|
@@ -12,7 +12,7 @@ ENV \
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.1.27
|
||||
RUN pip3 install uv==0.1.35
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
@@ -22,6 +22,7 @@ RUN \
|
||||
libavcodec-dev \
|
||||
libavdevice-dev \
|
||||
libavutil-dev \
|
||||
libgammu-dev \
|
||||
libswscale-dev \
|
||||
libswresample-dev \
|
||||
libavfilter-dev \
|
||||
|
@@ -253,6 +253,9 @@ async def async_setup_hass(
|
||||
runtime_config.log_no_color,
|
||||
)
|
||||
|
||||
if runtime_config.debug or hass.loop.get_debug():
|
||||
hass.config.debug = True
|
||||
|
||||
hass.config.safe_mode = runtime_config.safe_mode
|
||||
hass.config.skip_pip = runtime_config.skip_pip
|
||||
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
|
||||
@@ -316,6 +319,7 @@ async def async_setup_hass(
|
||||
hass = core.HomeAssistant(old_config.config_dir)
|
||||
if old_logging:
|
||||
hass.data[DATA_LOGGING] = old_logging
|
||||
hass.config.debug = old_config.debug
|
||||
hass.config.skip_pip = old_config.skip_pip
|
||||
hass.config.skip_pip_packages = old_config.skip_pip_packages
|
||||
hass.config.internal_url = old_config.internal_url
|
||||
|
@@ -8,6 +8,6 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["accuweather"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["accuweather==2.1.1"],
|
||||
"requirements": ["accuweather==3.0.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -24,7 +24,7 @@ async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
|
||||
"""Get info for the info page."""
|
||||
remaining_requests = list(hass.data[DOMAIN].values())[
|
||||
0
|
||||
].accuweather.requests_remaining
|
||||
].coordinator_observation.accuweather.requests_remaining
|
||||
|
||||
return {
|
||||
"can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT),
|
||||
|
@@ -157,3 +157,11 @@ class AirthingsHeaterEnergySensor(
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the value reported by the sensor."""
|
||||
return self.coordinator.data[self._id].sensors[self.entity_description.key] # type: ignore[no-any-return]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Check if device and sensor is available in data."""
|
||||
return (
|
||||
super().available
|
||||
and self.entity_description.key in self.coordinator.data[self._id].sensors
|
||||
)
|
||||
|
@@ -1,5 +1,6 @@
|
||||
"""Support for Alexa skill service end point."""
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
import enum
|
||||
import logging
|
||||
from typing import Any
|
||||
@@ -16,7 +17,9 @@ from .const import DOMAIN, SYN_RESOLUTION_MATCH
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HANDLERS = Registry() # type: ignore[var-annotated]
|
||||
HANDLERS: Registry[
|
||||
str, Callable[[HomeAssistant, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]]]
|
||||
] = Registry()
|
||||
|
||||
INTENTS_API_ENDPOINT = "/api/alexa"
|
||||
|
||||
@@ -129,8 +132,7 @@ async def async_handle_message(
|
||||
if not (handler := HANDLERS.get(req_type)):
|
||||
raise UnknownRequest(f"Received unknown request {req_type}")
|
||||
|
||||
response: dict[str, Any] = await handler(hass, message)
|
||||
return response
|
||||
return await handler(hass, message)
|
||||
|
||||
|
||||
@HANDLERS.register("SessionEndedRequest")
|
||||
|
@@ -1,3 +1,4 @@
|
||||
"""Constants for the Aranet integration."""
|
||||
|
||||
DOMAIN = "aranet"
|
||||
ARANET_MANUFACTURER_NAME = "SAF Tehnika"
|
||||
|
12
homeassistant/components/aranet/icons.json
Normal file
12
homeassistant/components/aranet/icons.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"radiation_total": {
|
||||
"default": "mdi:radioactive"
|
||||
},
|
||||
"radiation_rate": {
|
||||
"default": "mdi:radioactive"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -13,7 +13,7 @@
|
||||
"connectable": false
|
||||
}
|
||||
],
|
||||
"codeowners": ["@aschmitz", "@thecode"],
|
||||
"codeowners": ["@aschmitz", "@thecode", "@anrijs"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/aranet",
|
||||
|
@@ -23,6 +23,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_MANUFACTURER,
|
||||
ATTR_NAME,
|
||||
ATTR_SW_VERSION,
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
@@ -37,7 +38,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import ARANET_MANUFACTURER_NAME, DOMAIN
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -48,6 +49,7 @@ class AranetSensorEntityDescription(SensorEntityDescription):
|
||||
# Restrict the type to satisfy the type checker and catch attempts
|
||||
# to use UNDEFINED in the entity descriptions.
|
||||
name: str | None = None
|
||||
scale: float | int = 1
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = {
|
||||
@@ -79,6 +81,24 @@ SENSOR_DESCRIPTIONS = {
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"radiation_rate": AranetSensorEntityDescription(
|
||||
key="radiation_rate",
|
||||
translation_key="radiation_rate",
|
||||
name="Radiation Dose Rate",
|
||||
native_unit_of_measurement="μSv/h",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=2,
|
||||
scale=0.001,
|
||||
),
|
||||
"radiation_total": AranetSensorEntityDescription(
|
||||
key="radiation_total",
|
||||
translation_key="radiation_total",
|
||||
name="Radiation Total Dose",
|
||||
native_unit_of_measurement="mSv",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=4,
|
||||
scale=0.000001,
|
||||
),
|
||||
"battery": AranetSensorEntityDescription(
|
||||
key="battery",
|
||||
name="Battery",
|
||||
@@ -115,6 +135,7 @@ def _sensor_device_info_to_hass(
|
||||
hass_device_info = DeviceInfo({})
|
||||
if adv.readings and adv.readings.name:
|
||||
hass_device_info[ATTR_NAME] = adv.readings.name
|
||||
hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
|
||||
if adv.manufacturer_data:
|
||||
hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
|
||||
return hass_device_info
|
||||
@@ -132,6 +153,7 @@ def sensor_update_to_bluetooth_data_update(
|
||||
val = getattr(adv.readings, key)
|
||||
if val == -1:
|
||||
continue
|
||||
val *= desc.scale
|
||||
data[tag] = val
|
||||
names[tag] = desc.name
|
||||
descs[tag] = desc
|
||||
|
@@ -17,7 +17,7 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"integrations_diabled": "This device doesn't have integrations enabled. Please enable smart home integrations using the app and try again.",
|
||||
"integrations_disabled": "This device doesn't have integrations enabled. Please enable smart home integrations using the app and try again.",
|
||||
"no_devices_found": "No unconfigured Aranet devices found.",
|
||||
"outdated_version": "This device is using outdated firmware. Please update it to at least v1.2.0 and try again."
|
||||
}
|
||||
|
@@ -291,8 +291,11 @@ def websocket_list_runs(
|
||||
msg["id"],
|
||||
{
|
||||
"pipeline_runs": [
|
||||
{"pipeline_run_id": id, "timestamp": pipeline_run.timestamp}
|
||||
for id, pipeline_run in pipeline_debug.items()
|
||||
{
|
||||
"pipeline_run_id": pipeline_run_id,
|
||||
"timestamp": pipeline_run.timestamp,
|
||||
}
|
||||
for pipeline_run_id, pipeline_run in pipeline_debug.items()
|
||||
]
|
||||
},
|
||||
)
|
||||
|
@@ -707,7 +707,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
|
||||
@callback
|
||||
def started_action() -> None:
|
||||
self.hass.bus.async_fire(
|
||||
# This is always a callback from a coro so there is no
|
||||
# risk of this running in a thread which allows us to use
|
||||
# async_fire_internal
|
||||
self.hass.bus.async_fire_internal(
|
||||
EVENT_AUTOMATION_TRIGGERED, event_data, context=trigger_context
|
||||
)
|
||||
|
||||
|
@@ -1,5 +1,8 @@
|
||||
"""Describe logbook events."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.logbook import (
|
||||
LOGBOOK_ENTRY_CONTEXT_ID,
|
||||
LOGBOOK_ENTRY_ENTITY_ID,
|
||||
@@ -16,11 +19,16 @@ from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def async_describe_events(hass: HomeAssistant, async_describe_event): # type: ignore[no-untyped-def]
|
||||
def async_describe_events(
|
||||
hass: HomeAssistant,
|
||||
async_describe_event: Callable[
|
||||
[str, str, Callable[[LazyEventPartialState], dict[str, Any]]], None
|
||||
],
|
||||
) -> None:
|
||||
"""Describe logbook events."""
|
||||
|
||||
@callback
|
||||
def async_describe_logbook_event(event: LazyEventPartialState): # type: ignore[no-untyped-def]
|
||||
def async_describe_logbook_event(event: LazyEventPartialState) -> dict[str, Any]:
|
||||
"""Describe a logbook event."""
|
||||
data = event.data
|
||||
message = "triggered"
|
||||
|
93
homeassistant/components/axis/hub/event_source.py
Normal file
93
homeassistant/components/axis/hub/event_source.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Axis network device abstraction."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import axis
|
||||
from axis.errors import Unauthorized
|
||||
from axis.interfaces.mqtt import mqtt_json_to_event
|
||||
from axis.models.mqtt import ClientState
|
||||
from axis.stream_manager import Signal, State
|
||||
|
||||
from homeassistant.components import mqtt
|
||||
from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN
|
||||
from homeassistant.components.mqtt.models import ReceiveMessage
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.setup import async_when_setup
|
||||
|
||||
|
||||
class AxisEventSource:
|
||||
"""Manage connection to event sources from an Axis device."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: ConfigEntry, api: axis.AxisDevice
|
||||
) -> None:
|
||||
"""Initialize the device."""
|
||||
self.hass = hass
|
||||
self.config_entry = config_entry
|
||||
self.api = api
|
||||
|
||||
self.signal_reachable = f"axis_reachable_{config_entry.entry_id}"
|
||||
|
||||
self.available = True
|
||||
|
||||
@callback
|
||||
def setup(self) -> None:
|
||||
"""Set up the device events."""
|
||||
self.api.stream.connection_status_callback.append(self._connection_status_cb)
|
||||
self.api.enable_events()
|
||||
self.api.stream.start()
|
||||
|
||||
if self.api.vapix.mqtt.supported:
|
||||
async_when_setup(self.hass, MQTT_DOMAIN, self._async_use_mqtt)
|
||||
|
||||
@callback
|
||||
def teardown(self) -> None:
|
||||
"""Tear down connections."""
|
||||
self._disconnect_from_stream()
|
||||
|
||||
@callback
|
||||
def _disconnect_from_stream(self) -> None:
|
||||
"""Stop stream."""
|
||||
if self.api.stream.state != State.STOPPED:
|
||||
self.api.stream.connection_status_callback.clear()
|
||||
self.api.stream.stop()
|
||||
|
||||
async def _async_use_mqtt(self, hass: HomeAssistant, component: str) -> None:
|
||||
"""Set up to use MQTT."""
|
||||
try:
|
||||
status = await self.api.vapix.mqtt.get_client_status()
|
||||
except Unauthorized:
|
||||
# This means the user has too low privileges
|
||||
return
|
||||
|
||||
if status.status.state == ClientState.ACTIVE:
|
||||
self.config_entry.async_on_unload(
|
||||
await mqtt.async_subscribe(
|
||||
hass, f"{status.config.device_topic_prefix}/#", self._mqtt_message
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _mqtt_message(self, message: ReceiveMessage) -> None:
|
||||
"""Receive Axis MQTT message."""
|
||||
self._disconnect_from_stream()
|
||||
|
||||
if message.topic.endswith("event/connection"):
|
||||
return
|
||||
|
||||
event = mqtt_json_to_event(message.payload)
|
||||
self.api.event.handler(event)
|
||||
|
||||
@callback
|
||||
def _connection_status_cb(self, status: Signal) -> None:
|
||||
"""Handle signals of device connection status.
|
||||
|
||||
This is called on every RTSP keep-alive message.
|
||||
Only signal state change if state change is true.
|
||||
"""
|
||||
|
||||
if self.available != (status == Signal.PLAYING):
|
||||
self.available = not self.available
|
||||
async_dispatcher_send(self.hass, self.signal_reachable)
|
@@ -5,24 +5,17 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
import axis
|
||||
from axis.errors import Unauthorized
|
||||
from axis.interfaces.mqtt import mqtt_json_to_event
|
||||
from axis.models.mqtt import ClientState
|
||||
from axis.stream_manager import Signal, State
|
||||
|
||||
from homeassistant.components import mqtt
|
||||
from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN
|
||||
from homeassistant.components.mqtt.models import ReceiveMessage
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.setup import async_when_setup
|
||||
|
||||
from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN
|
||||
from .config import AxisConfig
|
||||
from .entity_loader import AxisEntityLoader
|
||||
from .event_source import AxisEventSource
|
||||
|
||||
|
||||
class AxisHub:
|
||||
@@ -35,9 +28,9 @@ class AxisHub:
|
||||
self.hass = hass
|
||||
self.config = AxisConfig.from_config_entry(config_entry)
|
||||
self.entity_loader = AxisEntityLoader(self)
|
||||
self.event_source = AxisEventSource(hass, config_entry, api)
|
||||
self.api = api
|
||||
|
||||
self.available = True
|
||||
self.fw_version = api.vapix.firmware_version
|
||||
self.product_type = api.vapix.product_type
|
||||
self.unique_id = format_mac(api.vapix.serial_number)
|
||||
@@ -51,32 +44,23 @@ class AxisHub:
|
||||
hub: AxisHub = hass.data[AXIS_DOMAIN][config_entry.entry_id]
|
||||
return hub
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Connection state to the device."""
|
||||
return self.event_source.available
|
||||
|
||||
# Signals
|
||||
|
||||
@property
|
||||
def signal_reachable(self) -> str:
|
||||
"""Device specific event to signal a change in connection status."""
|
||||
return f"axis_reachable_{self.config.entry.entry_id}"
|
||||
return self.event_source.signal_reachable
|
||||
|
||||
@property
|
||||
def signal_new_address(self) -> str:
|
||||
"""Device specific event to signal a change in device address."""
|
||||
return f"axis_new_address_{self.config.entry.entry_id}"
|
||||
|
||||
# Callbacks
|
||||
|
||||
@callback
|
||||
def connection_status_callback(self, status: Signal) -> None:
|
||||
"""Handle signals of device connection status.
|
||||
|
||||
This is called on every RTSP keep-alive message.
|
||||
Only signal state change if state change is true.
|
||||
"""
|
||||
|
||||
if self.available != (status == Signal.PLAYING):
|
||||
self.available = not self.available
|
||||
async_dispatcher_send(self.hass, self.signal_reachable)
|
||||
|
||||
@staticmethod
|
||||
async def async_new_address_callback(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
@@ -89,6 +73,7 @@ class AxisHub:
|
||||
"""
|
||||
hub = AxisHub.get_hub(hass, config_entry)
|
||||
hub.config = AxisConfig.from_config_entry(config_entry)
|
||||
hub.event_source.config_entry = config_entry
|
||||
hub.api.config.host = hub.config.host
|
||||
async_dispatcher_send(hass, hub.signal_new_address)
|
||||
|
||||
@@ -106,57 +91,19 @@ class AxisHub:
|
||||
sw_version=self.fw_version,
|
||||
)
|
||||
|
||||
async def async_use_mqtt(self, hass: HomeAssistant, component: str) -> None:
|
||||
"""Set up to use MQTT."""
|
||||
try:
|
||||
status = await self.api.vapix.mqtt.get_client_status()
|
||||
except Unauthorized:
|
||||
# This means the user has too low privileges
|
||||
return
|
||||
if status.status.state == ClientState.ACTIVE:
|
||||
self.config.entry.async_on_unload(
|
||||
await mqtt.async_subscribe(
|
||||
hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def mqtt_message(self, message: ReceiveMessage) -> None:
|
||||
"""Receive Axis MQTT message."""
|
||||
self.disconnect_from_stream()
|
||||
if message.topic.endswith("event/connection"):
|
||||
return
|
||||
event = mqtt_json_to_event(message.payload)
|
||||
self.api.event.handler(event)
|
||||
|
||||
# Setup and teardown methods
|
||||
|
||||
@callback
|
||||
def setup(self) -> None:
|
||||
"""Set up the device events."""
|
||||
self.entity_loader.initialize_platforms()
|
||||
|
||||
self.api.stream.connection_status_callback.append(
|
||||
self.connection_status_callback
|
||||
)
|
||||
self.api.enable_events()
|
||||
self.api.stream.start()
|
||||
|
||||
if self.api.vapix.mqtt.supported:
|
||||
async_when_setup(self.hass, MQTT_DOMAIN, self.async_use_mqtt)
|
||||
|
||||
@callback
|
||||
def disconnect_from_stream(self) -> None:
|
||||
"""Stop stream."""
|
||||
if self.api.stream.state != State.STOPPED:
|
||||
self.api.stream.connection_status_callback.clear()
|
||||
self.api.stream.stop()
|
||||
self.event_source.setup()
|
||||
|
||||
async def shutdown(self, event: Event) -> None:
|
||||
"""Stop the event stream."""
|
||||
self.disconnect_from_stream()
|
||||
self.event_source.teardown()
|
||||
|
||||
@callback
|
||||
def teardown(self) -> None:
|
||||
"""Reset this device to default state."""
|
||||
self.disconnect_from_stream()
|
||||
self.event_source.teardown()
|
||||
|
@@ -9,7 +9,7 @@ QUERY_INTERVAL = 300
|
||||
|
||||
RUN_TIMEOUT = 20
|
||||
|
||||
PRESET_MODE_AUTO = "Auto"
|
||||
PRESET_MODE_AUTO = "auto"
|
||||
|
||||
SPEED_COUNT = 7
|
||||
SPEED_RANGE = (1, SPEED_COUNT)
|
||||
|
@@ -48,6 +48,7 @@ class BAFFan(BAFEntity, FanEntity):
|
||||
_attr_preset_modes = [PRESET_MODE_AUTO]
|
||||
_attr_speed_count = SPEED_COUNT
|
||||
_attr_name = None
|
||||
_attr_translation_key = "baf"
|
||||
|
||||
@callback
|
||||
def _async_update_attrs(self) -> None:
|
||||
|
15
homeassistant/components/baf/icons.json
Normal file
15
homeassistant/components/baf/icons.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"entity": {
|
||||
"fan": {
|
||||
"baf": {
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"auto": "mdi:fan-auto"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -26,6 +26,17 @@
|
||||
"name": "Auto comfort"
|
||||
}
|
||||
},
|
||||
"fan": {
|
||||
"baf": {
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"comfort_min_speed": {
|
||||
"name": "Auto Comfort Minimum Speed"
|
||||
|
@@ -4,7 +4,11 @@ from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
from aiohttp.client_exceptions import (
|
||||
ClientConnectorError,
|
||||
ClientOSError,
|
||||
ServerTimeoutError,
|
||||
)
|
||||
from mozart_api.exceptions import ApiException
|
||||
from mozart_api.mozart_client import MozartClient
|
||||
|
||||
@@ -44,12 +48,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
model=entry.data[CONF_MODEL],
|
||||
)
|
||||
|
||||
client = MozartClient(host=entry.data[CONF_HOST], websocket_reconnect=True)
|
||||
client = MozartClient(host=entry.data[CONF_HOST])
|
||||
|
||||
# Check connection and try to initialize it.
|
||||
# Check API and WebSocket connection
|
||||
try:
|
||||
await client.get_battery_state(_request_timeout=3)
|
||||
except (ApiException, ClientConnectorError, TimeoutError) as error:
|
||||
await client.check_device_connection(True)
|
||||
except* (
|
||||
ClientConnectorError,
|
||||
ClientOSError,
|
||||
ServerTimeoutError,
|
||||
ApiException,
|
||||
TimeoutError,
|
||||
) as error:
|
||||
await client.close_api_client()
|
||||
raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error
|
||||
|
||||
@@ -61,11 +71,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
client,
|
||||
)
|
||||
|
||||
# Check and start WebSocket connection
|
||||
if not await client.connect_notifications(remote_control=True):
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unable to connect to {entry.title} WebSocket notification channel"
|
||||
)
|
||||
# Start WebSocket connection
|
||||
await client.connect_notifications(remote_control=True, reconnect=True)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
|
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["mozart-api==3.2.1.150.6"],
|
||||
"requirements": ["mozart-api==3.4.1.8.5"],
|
||||
"zeroconf": ["_bangolufsen._tcp.local."]
|
||||
}
|
||||
|
@@ -363,7 +363,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
def is_volume_muted(self) -> bool | None:
|
||||
"""Boolean if volume is currently muted."""
|
||||
if self._volume.muted and self._volume.muted.muted:
|
||||
return self._volume.muted.muted
|
||||
# The any return here is side effect of pydantic v2 compatibility
|
||||
# This will be fixed in the future.
|
||||
return self._volume.muted.muted # type: ignore[no-any-return]
|
||||
return None
|
||||
|
||||
@property
|
||||
|
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import contextlib
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -97,7 +96,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
|
||||
await self._camera.async_arm(True)
|
||||
|
||||
except TimeoutError as er:
|
||||
raise HomeAssistantError("Blink failed to arm camera") from er
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_arm",
|
||||
) from er
|
||||
|
||||
self._camera.motion_enabled = True
|
||||
await self.coordinator.async_refresh()
|
||||
@@ -107,7 +109,10 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
|
||||
try:
|
||||
await self._camera.async_arm(False)
|
||||
except TimeoutError as er:
|
||||
raise HomeAssistantError("Blink failed to disarm camera") from er
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_disarm",
|
||||
) from er
|
||||
|
||||
self._camera.motion_enabled = False
|
||||
await self.coordinator.async_refresh()
|
||||
@@ -124,8 +129,14 @@ class BlinkCamera(CoordinatorEntity[BlinkUpdateCoordinator], Camera):
|
||||
|
||||
async def trigger_camera(self) -> None:
|
||||
"""Trigger camera to take a snapshot."""
|
||||
with contextlib.suppress(TimeoutError):
|
||||
try:
|
||||
await self._camera.snap_picture()
|
||||
except TimeoutError as er:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_snap",
|
||||
) from er
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
def camera_image(
|
||||
|
@@ -106,16 +106,31 @@
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Integration \"{target}\" not found in registry"
|
||||
"message": "Integration \"{target}\" not found in registry."
|
||||
},
|
||||
"no_path": {
|
||||
"message": "Can't write to directory {target}, no access to path!"
|
||||
},
|
||||
"cant_write": {
|
||||
"message": "Can't write to file"
|
||||
"message": "Can't write to file."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded"
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"failed_arm": {
|
||||
"message": "Blink failed to arm camera."
|
||||
},
|
||||
"failed_disarm": {
|
||||
"message": "Blink failed to disarm camera."
|
||||
},
|
||||
"failed_snap": {
|
||||
"message": "Blink failed to snap a picture."
|
||||
},
|
||||
"failed_arm_motion": {
|
||||
"message": "Blink failed to arm camera motion detection."
|
||||
},
|
||||
"failed_disarm_motion": {
|
||||
"message": "Blink failed to disarm camera motion detection."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
@@ -75,7 +75,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity):
|
||||
|
||||
except TimeoutError as er:
|
||||
raise HomeAssistantError(
|
||||
"Blink failed to arm camera motion detection"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_arm_motion",
|
||||
) from er
|
||||
|
||||
await self.coordinator.async_refresh()
|
||||
@@ -87,7 +88,8 @@ class BlinkSwitch(CoordinatorEntity[BlinkUpdateCoordinator], SwitchEntity):
|
||||
|
||||
except TimeoutError as er:
|
||||
raise HomeAssistantError(
|
||||
"Blink failed to dis-arm camera motion detection"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_disarm_motion",
|
||||
) from er
|
||||
|
||||
await self.coordinator.async_refresh()
|
||||
|
@@ -934,7 +934,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
selected_source = items[0]
|
||||
url = f"Play?url={selected_source['url']}&preset_id&image={selected_source['image']}"
|
||||
|
||||
if "is_raw_url" in selected_source and selected_source["is_raw_url"]:
|
||||
if selected_source.get("is_raw_url"):
|
||||
url = selected_source["url"]
|
||||
|
||||
return await self.send_bluesound_command(url)
|
||||
|
@@ -86,6 +86,7 @@ from .manager import HomeAssistantBluetoothManager
|
||||
from .match import BluetoothCallbackMatcher, IntegrationMatcher
|
||||
from .models import BluetoothCallback, BluetoothChange
|
||||
from .storage import BluetoothStorage
|
||||
from .util import adapter_title
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -332,6 +333,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
) from err
|
||||
adapters = await manager.async_get_bluetooth_adapters()
|
||||
details = adapters[adapter]
|
||||
if entry.title == address:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, title=adapter_title(adapter, details)
|
||||
)
|
||||
slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS
|
||||
entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots))
|
||||
await async_update_device(hass, entry, adapter, details)
|
||||
|
@@ -12,7 +12,6 @@ from bluetooth_adapters import (
|
||||
AdapterDetails,
|
||||
adapter_human_name,
|
||||
adapter_model,
|
||||
adapter_unique_name,
|
||||
get_adapters,
|
||||
)
|
||||
import voluptuous as vol
|
||||
@@ -28,6 +27,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
|
||||
from . import models
|
||||
from .const import CONF_ADAPTER, CONF_DETAILS, CONF_PASSIVE, DOMAIN
|
||||
from .util import adapter_title
|
||||
|
||||
OPTIONS_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -47,14 +47,6 @@ def adapter_display_info(adapter: str, details: AdapterDetails) -> str:
|
||||
return f"{name} {manufacturer} {model}"
|
||||
|
||||
|
||||
def adapter_title(adapter: str, details: AdapterDetails) -> str:
|
||||
"""Return the adapter title."""
|
||||
unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS])
|
||||
model = adapter_model(details)
|
||||
manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown"
|
||||
return f"{manufacturer} {model} ({unique_name})"
|
||||
|
||||
|
||||
class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Bluetooth."""
|
||||
|
||||
|
@@ -16,8 +16,8 @@
|
||||
"requirements": [
|
||||
"bleak==0.21.1",
|
||||
"bleak-retry-connector==3.5.0",
|
||||
"bluetooth-adapters==0.18.0",
|
||||
"bluetooth-auto-recovery==1.4.1",
|
||||
"bluetooth-adapters==0.19.0",
|
||||
"bluetooth-auto-recovery==1.4.2",
|
||||
"bluetooth-data-tools==1.19.0",
|
||||
"dbus-fast==2.21.1",
|
||||
"habluetooth==2.8.0"
|
||||
|
@@ -2,7 +2,14 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from bluetooth_adapters import BluetoothAdapters
|
||||
from bluetooth_adapters import (
|
||||
ADAPTER_ADDRESS,
|
||||
ADAPTER_MANUFACTURER,
|
||||
ADAPTER_PRODUCT,
|
||||
AdapterDetails,
|
||||
BluetoothAdapters,
|
||||
adapter_unique_name,
|
||||
)
|
||||
from bluetooth_data_tools import monotonic_time_coarse
|
||||
|
||||
from homeassistant.core import callback
|
||||
@@ -69,3 +76,12 @@ def async_load_history_from_system(
|
||||
connectable_loaded_history[address] = service_info
|
||||
|
||||
return all_loaded_history, connectable_loaded_history
|
||||
|
||||
|
||||
@callback
|
||||
def adapter_title(adapter: str, details: AdapterDetails) -> str:
|
||||
"""Return the adapter title."""
|
||||
unique_name = adapter_unique_name(adapter, details[ADAPTER_ADDRESS])
|
||||
model = details.get(ADAPTER_PRODUCT, "Unknown")
|
||||
manufacturer = details[ADAPTER_MANUFACTURER] or "Unknown"
|
||||
return f"{manufacturer} {model} ({unique_name})"
|
||||
|
@@ -113,7 +113,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
):
|
||||
updates[CONF_ACCESS_TOKEN] = token
|
||||
return self.async_update_reload_and_abort(
|
||||
entry, data={**entry.data, **updates}, reason="already_configured"
|
||||
entry,
|
||||
data={**entry.data, **updates},
|
||||
reason="already_configured",
|
||||
reload_even_if_entry_is_unchanged=False,
|
||||
)
|
||||
|
||||
self._discovered = {CONF_HOST: host, CONF_NAME: bond_id}
|
||||
|
@@ -1,3 +1,11 @@
|
||||
"""Constants for the Bring! integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN = "bring"
|
||||
|
||||
ATTR_SENDER: Final = "sender"
|
||||
ATTR_ITEM_NAME: Final = "item"
|
||||
ATTR_NOTIFICATION_TYPE: Final = "message"
|
||||
|
||||
SERVICE_PUSH_NOTIFICATION = "send_message"
|
||||
|
@@ -5,5 +5,8 @@
|
||||
"default": "mdi:cart"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_message": "mdi:cellphone-message"
|
||||
}
|
||||
}
|
||||
|
23
homeassistant/components/bring/services.yaml
Normal file
23
homeassistant/components/bring/services.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
send_message:
|
||||
target:
|
||||
entity:
|
||||
domain: todo
|
||||
integration: bring
|
||||
fields:
|
||||
message:
|
||||
example: urgent_message
|
||||
required: true
|
||||
default: "going_shopping"
|
||||
selector:
|
||||
select:
|
||||
translation_key: "notification_type_selector"
|
||||
options:
|
||||
- "going_shopping"
|
||||
- "changed_list"
|
||||
- "shopping_done"
|
||||
- "urgent_message"
|
||||
item:
|
||||
example: Cilantro
|
||||
required: false
|
||||
selector:
|
||||
text:
|
@@ -38,6 +38,42 @@
|
||||
},
|
||||
"setup_authentication_exception": {
|
||||
"message": "Authentication failed for {email}, check your email and password"
|
||||
},
|
||||
"notify_missing_argument_item": {
|
||||
"message": "Failed to call service {service}. 'URGENT_MESSAGE' requires a value @ data['item']. Got None"
|
||||
},
|
||||
"notify_request_failed": {
|
||||
"message": "Failed to send push notification for bring due to a connection error, try again later"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_message": {
|
||||
"name": "[%key:component::notify::services::notify::name%]",
|
||||
"description": "Send a mobile push notification to members of a shared Bring! list.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "List",
|
||||
"description": "Bring! list whose members (except sender) will be notified."
|
||||
},
|
||||
"message": {
|
||||
"name": "Notification type",
|
||||
"description": "Type of push notification to send to list members."
|
||||
},
|
||||
"item": {
|
||||
"name": "Item (Required if message type `Breaking news` selected)",
|
||||
"description": "Item name to include in a breaking news message e.g. `Breaking news - Please get cilantro!`"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"notification_type_selector": {
|
||||
"options": {
|
||||
"going_shopping": "I'm going shopping! - Last chance for adjustments",
|
||||
"changed_list": "List changed - Check it out",
|
||||
"shopping_done": "Shopping done - you can relax",
|
||||
"urgent_message": "Breaking news - Please get `item`!"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -6,7 +6,8 @@ from typing import TYPE_CHECKING
|
||||
import uuid
|
||||
|
||||
from bring_api.exceptions import BringRequestException
|
||||
from bring_api.types import BringItem, BringItemOperation
|
||||
from bring_api.types import BringItem, BringItemOperation, BringNotificationType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.todo import (
|
||||
TodoItem,
|
||||
@@ -16,11 +17,18 @@ from homeassistant.components.todo import (
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.config_validation import make_entity_service_schema
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import (
|
||||
ATTR_ITEM_NAME,
|
||||
ATTR_NOTIFICATION_TYPE,
|
||||
DOMAIN,
|
||||
SERVICE_PUSH_NOTIFICATION,
|
||||
)
|
||||
from .coordinator import BringData, BringDataUpdateCoordinator
|
||||
|
||||
|
||||
@@ -46,6 +54,21 @@ async def async_setup_entry(
|
||||
for bring_list in coordinator.data.values()
|
||||
)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_PUSH_NOTIFICATION,
|
||||
make_entity_service_schema(
|
||||
{
|
||||
vol.Required(ATTR_NOTIFICATION_TYPE): vol.All(
|
||||
vol.Upper, cv.enum(BringNotificationType)
|
||||
),
|
||||
vol.Optional(ATTR_ITEM_NAME): cv.string,
|
||||
}
|
||||
),
|
||||
"async_send_message",
|
||||
)
|
||||
|
||||
|
||||
class BringTodoListEntity(
|
||||
CoordinatorEntity[BringDataUpdateCoordinator], TodoListEntity
|
||||
@@ -231,3 +254,26 @@ class BringTodoListEntity(
|
||||
) from e
|
||||
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_send_message(
|
||||
self,
|
||||
message: BringNotificationType,
|
||||
item: str | None = None,
|
||||
) -> None:
|
||||
"""Send a push notification to members of a shared bring list."""
|
||||
|
||||
try:
|
||||
await self.coordinator.bring.notify(self._list_uuid, message, item or None)
|
||||
except BringRequestException as e:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="notify_request_failed",
|
||||
) from e
|
||||
except ValueError as e:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="notify_missing_argument_item",
|
||||
translation_placeholders={
|
||||
"service": f"{DOMAIN}.{SERVICE_PUSH_NOTIFICATION}",
|
||||
},
|
||||
) from e
|
||||
|
@@ -5,6 +5,7 @@ import voluptuous as vol
|
||||
from homeassistant.const import CONF_NAME, CONF_URL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, discovery
|
||||
import homeassistant.helpers.issue_registry as ir
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
DOMAIN = "circuit"
|
||||
@@ -26,6 +27,17 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Unify Circuit component."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"service_removal",
|
||||
breaks_in_ha_version="2024.7.0",
|
||||
is_fixable=False,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="service_removal",
|
||||
translation_placeholders={"integration": "Unify Circuit", "domain": DOMAIN},
|
||||
)
|
||||
webhooks = config[DOMAIN][CONF_WEBHOOK]
|
||||
|
||||
for webhook_conf in webhooks:
|
||||
|
8
homeassistant/components/circuit/strings.json
Normal file
8
homeassistant/components/circuit/strings.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"issues": {
|
||||
"service_removal": {
|
||||
"title": "The {integration} integration is being removed",
|
||||
"description": "The {integration} integration will be removed, as the service is no longer maintained.\n\n\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
|
||||
}
|
||||
}
|
||||
}
|
@@ -7,11 +7,14 @@ from collections.abc import Awaitable, Callable
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
from typing import cast
|
||||
from urllib.parse import quote_plus, urljoin
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import alexa, google_assistant
|
||||
from homeassistant.components import alexa, google_assistant, http
|
||||
from homeassistant.components.auth import STRICT_CONNECTION_URL
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_DESCRIPTION,
|
||||
@@ -21,8 +24,21 @@ from homeassistant.const import (
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HassJob, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.core import (
|
||||
Event,
|
||||
HassJob,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import (
|
||||
HomeAssistantError,
|
||||
ServiceValidationError,
|
||||
Unauthorized,
|
||||
UnknownUser,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, entityfilter
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
@@ -31,6 +47,7 @@ from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
@@ -265,18 +282,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
|
||||
|
||||
_remote_handle_prefs_updated(cloud)
|
||||
|
||||
async def _service_handler(service: ServiceCall) -> None:
|
||||
"""Handle service for cloud."""
|
||||
if service.service == SERVICE_REMOTE_CONNECT:
|
||||
await prefs.async_update(remote_enabled=True)
|
||||
elif service.service == SERVICE_REMOTE_DISCONNECT:
|
||||
await prefs.async_update(remote_enabled=False)
|
||||
|
||||
async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler)
|
||||
async_register_admin_service(
|
||||
hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
|
||||
)
|
||||
_setup_services(hass, prefs)
|
||||
|
||||
async def async_startup_repairs(_: datetime) -> None:
|
||||
"""Create repair issues after startup."""
|
||||
@@ -395,3 +401,67 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
@callback
|
||||
def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None:
|
||||
"""Set up services for cloud component."""
|
||||
|
||||
async def _service_handler(service: ServiceCall) -> None:
|
||||
"""Handle service for cloud."""
|
||||
if service.service == SERVICE_REMOTE_CONNECT:
|
||||
await prefs.async_update(remote_enabled=True)
|
||||
elif service.service == SERVICE_REMOTE_DISCONNECT:
|
||||
await prefs.async_update(remote_enabled=False)
|
||||
|
||||
async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler)
|
||||
async_register_admin_service(
|
||||
hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
|
||||
)
|
||||
|
||||
async def create_temporary_strict_connection_url(
|
||||
call: ServiceCall,
|
||||
) -> ServiceResponse:
|
||||
"""Create a strict connection url and return it."""
|
||||
# Copied form homeassistant/helpers/service.py#_async_admin_handler
|
||||
# as the helper supports no responses yet
|
||||
if call.context.user_id:
|
||||
user = await hass.auth.async_get_user(call.context.user_id)
|
||||
if user is None:
|
||||
raise UnknownUser(context=call.context)
|
||||
if not user.is_admin:
|
||||
raise Unauthorized(context=call.context)
|
||||
|
||||
if prefs.strict_connection is http.const.StrictConnectionMode.DISABLED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="strict_connection_not_enabled",
|
||||
)
|
||||
|
||||
try:
|
||||
url = get_url(hass, require_cloud=True)
|
||||
except NoURLAvailableError as ex:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_url_available",
|
||||
) from ex
|
||||
|
||||
path = async_sign_path(
|
||||
hass,
|
||||
STRICT_CONNECTION_URL,
|
||||
timedelta(hours=1),
|
||||
use_content_user=True,
|
||||
)
|
||||
url = urljoin(url, path)
|
||||
|
||||
return {
|
||||
"url": f"https://login.home-assistant.io?u={quote_plus(url)}",
|
||||
"direct_url": url,
|
||||
}
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
"create_temporary_strict_connection_url",
|
||||
create_temporary_strict_connection_url,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
@@ -250,6 +250,7 @@ class CloudClient(Interface):
|
||||
"enabled": self._prefs.remote_enabled,
|
||||
"instance_domain": self.cloud.remote.instance_domain,
|
||||
"alias": self.cloud.remote.alias,
|
||||
"strict_connection": self._prefs.strict_connection,
|
||||
},
|
||||
"version": HA_VERSION,
|
||||
"instance_id": self.prefs.instance_id,
|
||||
|
@@ -33,6 +33,7 @@ PREF_GOOGLE_SETTINGS_VERSION = "google_settings_version"
|
||||
PREF_TTS_DEFAULT_VOICE = "tts_default_voice"
|
||||
PREF_GOOGLE_CONNECTED = "google_connected"
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE = "remote_allow_remote_enable"
|
||||
PREF_STRICT_CONNECTION = "strict_connection"
|
||||
DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "JennyNeural")
|
||||
DEFAULT_DISABLE_2FA = False
|
||||
DEFAULT_ALEXA_REPORT_STATE = True
|
||||
|
@@ -19,7 +19,7 @@ from hass_nabucasa.const import STATE_DISCONNECTED
|
||||
from hass_nabucasa.voice import TTS_VOICES
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.alexa import (
|
||||
entities as alexa_entities,
|
||||
errors as alexa_errors,
|
||||
@@ -46,6 +46,7 @@ from .const import (
|
||||
PREF_GOOGLE_REPORT_STATE,
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN,
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE,
|
||||
PREF_STRICT_CONNECTION,
|
||||
PREF_TTS_DEFAULT_VOICE,
|
||||
REQUEST_TIMEOUT,
|
||||
)
|
||||
@@ -452,6 +453,9 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
|
||||
vol.Coerce(tuple), validate_language_voice
|
||||
),
|
||||
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
|
||||
vol.Optional(PREF_STRICT_CONNECTION): vol.Coerce(
|
||||
http.const.StrictConnectionMode
|
||||
),
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
|
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"services": {
|
||||
"create_temporary_strict_connection_url": "mdi:login-variant",
|
||||
"remote_connect": "mdi:cloud",
|
||||
"remote_disconnect": "mdi:cloud-off"
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@
|
||||
"name": "Home Assistant Cloud",
|
||||
"after_dependencies": ["assist_pipeline", "google_assistant", "alexa"],
|
||||
"codeowners": ["@home-assistant/cloud"],
|
||||
"dependencies": ["http", "repairs", "webhook"],
|
||||
"dependencies": ["auth", "http", "repairs", "webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/cloud",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
|
@@ -10,7 +10,7 @@ from hass_nabucasa.voice import MAP_VOICE
|
||||
|
||||
from homeassistant.auth.const import GROUP_ID_ADMIN
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.components import webhook
|
||||
from homeassistant.components import http, webhook
|
||||
from homeassistant.components.google_assistant.http import (
|
||||
async_get_users as async_get_google_assistant_users,
|
||||
)
|
||||
@@ -44,6 +44,7 @@ from .const import (
|
||||
PREF_INSTANCE_ID,
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE,
|
||||
PREF_REMOTE_DOMAIN,
|
||||
PREF_STRICT_CONNECTION,
|
||||
PREF_TTS_DEFAULT_VOICE,
|
||||
PREF_USERNAME,
|
||||
)
|
||||
@@ -176,6 +177,7 @@ class CloudPreferences:
|
||||
google_settings_version: int | UndefinedType = UNDEFINED,
|
||||
google_connected: bool | UndefinedType = UNDEFINED,
|
||||
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
|
||||
strict_connection: http.const.StrictConnectionMode | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Update user preferences."""
|
||||
prefs = {**self._prefs}
|
||||
@@ -195,6 +197,7 @@ class CloudPreferences:
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
(PREF_GOOGLE_CONNECTED, google_connected),
|
||||
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
|
||||
(PREF_STRICT_CONNECTION, strict_connection),
|
||||
):
|
||||
if value is not UNDEFINED:
|
||||
prefs[key] = value
|
||||
@@ -242,6 +245,7 @@ class CloudPreferences:
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin,
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable,
|
||||
PREF_TTS_DEFAULT_VOICE: self.tts_default_voice,
|
||||
PREF_STRICT_CONNECTION: self.strict_connection,
|
||||
}
|
||||
|
||||
@property
|
||||
@@ -358,6 +362,17 @@ class CloudPreferences:
|
||||
"""
|
||||
return self._prefs.get(PREF_TTS_DEFAULT_VOICE, DEFAULT_TTS_DEFAULT_VOICE) # type: ignore[no-any-return]
|
||||
|
||||
@property
|
||||
def strict_connection(self) -> http.const.StrictConnectionMode:
|
||||
"""Return the strict connection mode."""
|
||||
mode = self._prefs.get(
|
||||
PREF_STRICT_CONNECTION, http.const.StrictConnectionMode.DISABLED
|
||||
)
|
||||
|
||||
if not isinstance(mode, http.const.StrictConnectionMode):
|
||||
mode = http.const.StrictConnectionMode(mode)
|
||||
return mode # type: ignore[no-any-return]
|
||||
|
||||
async def get_cloud_user(self) -> str:
|
||||
"""Return ID of Home Assistant Cloud system user."""
|
||||
user = await self._load_cloud_user()
|
||||
@@ -415,4 +430,5 @@ class CloudPreferences:
|
||||
PREF_REMOTE_DOMAIN: None,
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE: True,
|
||||
PREF_USERNAME: username,
|
||||
PREF_STRICT_CONNECTION: http.const.StrictConnectionMode.DISABLED,
|
||||
}
|
||||
|
@@ -5,6 +5,14 @@
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"strict_connection_not_enabled": {
|
||||
"message": "Strict connection is not enabled for cloud requests"
|
||||
},
|
||||
"no_url_available": {
|
||||
"message": "No cloud URL available.\nPlease mark sure you have a working Remote UI."
|
||||
}
|
||||
},
|
||||
"system_health": {
|
||||
"info": {
|
||||
"can_reach_cert_server": "Reach Certificate Server",
|
||||
@@ -73,6 +81,10 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"create_temporary_strict_connection_url": {
|
||||
"name": "Create a temporary strict connection URL",
|
||||
"description": "Create a temporary strict connection URL, which can be used to login on another device."
|
||||
},
|
||||
"remote_connect": {
|
||||
"name": "Remote connect",
|
||||
"description": "Makes the instance UI accessible from outside of the local network by using Home Assistant Cloud."
|
||||
|
15
homeassistant/components/cloud/util.py
Normal file
15
homeassistant/components/cloud/util.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Cloud util functions."""
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .client import CloudClient
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
def get_strict_connection_mode(hass: HomeAssistant) -> http.const.StrictConnectionMode:
|
||||
"""Get the strict connection mode."""
|
||||
cloud: Cloud[CloudClient] = hass.data[DOMAIN]
|
||||
return cloud.client.prefs.strict_connection
|
@@ -4,7 +4,9 @@
|
||||
"codeowners": ["@chemelli74"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/comelit",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aiocomelit==0.9.0"]
|
||||
}
|
||||
|
@@ -30,6 +30,7 @@ from homeassistant.helpers.update_coordinator import (
|
||||
)
|
||||
|
||||
from .const import (
|
||||
API_RETRY_TIMES,
|
||||
CONF_ACCOUNT,
|
||||
CONF_CONFIG_LISTENER,
|
||||
CONF_CONTROLLER_UNIQUE_ID,
|
||||
@@ -47,6 +48,17 @@ _LOGGER = logging.getLogger(__name__)
|
||||
PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER]
|
||||
|
||||
|
||||
async def call_c4_api_retry(func, *func_args):
|
||||
"""Call C4 API function and retry on failure."""
|
||||
for i in range(API_RETRY_TIMES):
|
||||
try:
|
||||
return await func(*func_args)
|
||||
except client_exceptions.ClientError as exception:
|
||||
_LOGGER.error("Error connecting to Control4 account API: %s", exception)
|
||||
if i == API_RETRY_TIMES - 1:
|
||||
raise ConfigEntryNotReady(exception) from exception
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Control4 from a config entry."""
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
@@ -74,18 +86,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
controller_unique_id = config[CONF_CONTROLLER_UNIQUE_ID]
|
||||
entry_data[CONF_CONTROLLER_UNIQUE_ID] = controller_unique_id
|
||||
|
||||
director_token_dict = await account.getDirectorBearerToken(controller_unique_id)
|
||||
director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False)
|
||||
director_token_dict = await call_c4_api_retry(
|
||||
account.getDirectorBearerToken, controller_unique_id
|
||||
)
|
||||
|
||||
director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False)
|
||||
director = C4Director(
|
||||
config[CONF_HOST], director_token_dict[CONF_TOKEN], director_session
|
||||
)
|
||||
entry_data[CONF_DIRECTOR] = director
|
||||
|
||||
# Add Control4 controller to device registry
|
||||
controller_href = (await account.getAccountControllers())["href"]
|
||||
entry_data[CONF_DIRECTOR_SW_VERSION] = await account.getControllerOSVersion(
|
||||
controller_href
|
||||
controller_href = (await call_c4_api_retry(account.getAccountControllers))["href"]
|
||||
entry_data[CONF_DIRECTOR_SW_VERSION] = await call_c4_api_retry(
|
||||
account.getControllerOSVersion, controller_href
|
||||
)
|
||||
|
||||
_, model, mac_address = controller_unique_id.split("_", 3)
|
||||
|
@@ -5,6 +5,8 @@ DOMAIN = "control4"
|
||||
DEFAULT_SCAN_INTERVAL = 5
|
||||
MIN_SCAN_INTERVAL = 1
|
||||
|
||||
API_RETRY_TIMES = 5
|
||||
|
||||
CONF_ACCOUNT = "account"
|
||||
CONF_DIRECTOR = "director"
|
||||
CONF_DIRECTOR_SW_VERSION = "director_sw_version"
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.3"]
|
||||
"requirements": ["hassil==1.6.1", "home-assistant-intents==2024.4.24"]
|
||||
}
|
||||
|
@@ -15,7 +15,7 @@
|
||||
"quality_scale": "internal",
|
||||
"requirements": [
|
||||
"aiodhcpwatcher==1.0.0",
|
||||
"aiodiscover==2.0.0",
|
||||
"aiodiscover==2.1.0",
|
||||
"cached_ipaddress==0.3.0"
|
||||
]
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/drop_connect",
|
||||
"iot_class": "local_push",
|
||||
"mqtt": ["drop_connect/discovery/#"],
|
||||
"requirements": ["dropmqttapi==1.0.2"]
|
||||
"requirements": ["dropmqttapi==1.0.3"]
|
||||
}
|
||||
|
@@ -2,23 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dwdwfsapi import DwdWeatherWarningsAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import CONF_REGION_IDENTIFIER, DOMAIN, PLATFORMS
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .coordinator import DwdWeatherWarningsCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
region_identifier: str = entry.data[CONF_REGION_IDENTIFIER]
|
||||
|
||||
# Initialize the API and coordinator.
|
||||
api = await hass.async_add_executor_job(DwdWeatherWarningsAPI, region_identifier)
|
||||
coordinator = DwdWeatherWarningsCoordinator(hass, api)
|
||||
|
||||
coordinator = DwdWeatherWarningsCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
|
@@ -8,9 +8,15 @@ from dwdwfsapi import DwdWeatherWarningsAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
|
||||
|
||||
from .const import CONF_REGION_IDENTIFIER, DOMAIN
|
||||
from .const import CONF_REGION_DEVICE_TRACKER, CONF_REGION_IDENTIFIER, DOMAIN
|
||||
from .exceptions import EntityNotFoundError
|
||||
from .util import get_position_data
|
||||
|
||||
EXCLUSIVE_OPTIONS = (CONF_REGION_IDENTIFIER, CONF_REGION_DEVICE_TRACKER)
|
||||
|
||||
|
||||
class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@@ -25,27 +31,70 @@ class DwdWeatherWarningsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors: dict = {}
|
||||
|
||||
if user_input is not None:
|
||||
region_identifier = user_input[CONF_REGION_IDENTIFIER]
|
||||
# Check, if either CONF_REGION_IDENTIFIER or CONF_GPS_TRACKER has been set.
|
||||
if all(k not in user_input for k in EXCLUSIVE_OPTIONS):
|
||||
errors["base"] = "no_identifier"
|
||||
elif all(k in user_input for k in EXCLUSIVE_OPTIONS):
|
||||
errors["base"] = "ambiguous_identifier"
|
||||
elif CONF_REGION_IDENTIFIER in user_input:
|
||||
# Validate region identifier using the API
|
||||
identifier = user_input[CONF_REGION_IDENTIFIER]
|
||||
|
||||
# Validate region identifier using the API
|
||||
if not await self.hass.async_add_executor_job(
|
||||
DwdWeatherWarningsAPI, region_identifier
|
||||
):
|
||||
errors["base"] = "invalid_identifier"
|
||||
if not await self.hass.async_add_executor_job(
|
||||
DwdWeatherWarningsAPI, identifier
|
||||
):
|
||||
errors["base"] = "invalid_identifier"
|
||||
|
||||
if not errors:
|
||||
# Set the unique ID for this config entry.
|
||||
await self.async_set_unique_id(region_identifier)
|
||||
self._abort_if_unique_id_configured()
|
||||
if not errors:
|
||||
# Set the unique ID for this config entry.
|
||||
await self.async_set_unique_id(identifier)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(title=region_identifier, data=user_input)
|
||||
return self.async_create_entry(title=identifier, data=user_input)
|
||||
else: # CONF_REGION_DEVICE_TRACKER
|
||||
device_tracker = user_input[CONF_REGION_DEVICE_TRACKER]
|
||||
registry = er.async_get(self.hass)
|
||||
entity_entry = registry.async_get(device_tracker)
|
||||
|
||||
if entity_entry is None:
|
||||
errors["base"] = "entity_not_found"
|
||||
else:
|
||||
try:
|
||||
position = get_position_data(self.hass, entity_entry.id)
|
||||
except EntityNotFoundError:
|
||||
errors["base"] = "entity_not_found"
|
||||
except AttributeError:
|
||||
errors["base"] = "attribute_not_found"
|
||||
else:
|
||||
# Validate position using the API
|
||||
if not await self.hass.async_add_executor_job(
|
||||
DwdWeatherWarningsAPI, position
|
||||
):
|
||||
errors["base"] = "invalid_identifier"
|
||||
|
||||
# Position is valid here, because the API call was successful.
|
||||
if not errors and position is not None and entity_entry is not None:
|
||||
# Set the unique ID for this config entry.
|
||||
await self.async_set_unique_id(entity_entry.id)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
# Replace entity ID with registry ID for more stability.
|
||||
user_input[CONF_REGION_DEVICE_TRACKER] = entity_entry.id
|
||||
|
||||
return self.async_create_entry(
|
||||
title=device_tracker.removeprefix("device_tracker."),
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
errors=errors,
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_REGION_IDENTIFIER): cv.string,
|
||||
vol.Optional(CONF_REGION_IDENTIFIER): cv.string,
|
||||
vol.Optional(CONF_REGION_DEVICE_TRACKER): EntitySelector(
|
||||
EntitySelectorConfig(domain="device_tracker")
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
@@ -14,6 +14,7 @@ DOMAIN: Final = "dwd_weather_warnings"
|
||||
|
||||
CONF_REGION_NAME: Final = "region_name"
|
||||
CONF_REGION_IDENTIFIER: Final = "region_identifier"
|
||||
CONF_REGION_DEVICE_TRACKER: Final = "region_device_tracker"
|
||||
|
||||
ATTR_REGION_NAME: Final = "region_name"
|
||||
ATTR_REGION_ID: Final = "region_id"
|
||||
|
@@ -4,23 +4,79 @@ from __future__ import annotations
|
||||
|
||||
from dwdwfsapi import DwdWeatherWarningsAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import location
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER
|
||||
from .const import (
|
||||
CONF_REGION_DEVICE_TRACKER,
|
||||
CONF_REGION_IDENTIFIER,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
from .exceptions import EntityNotFoundError
|
||||
from .util import get_position_data
|
||||
|
||||
|
||||
class DwdWeatherWarningsCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Custom coordinator for the dwd_weather_warnings integration."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, api: DwdWeatherWarningsAPI) -> None:
|
||||
config_entry: ConfigEntry
|
||||
api: DwdWeatherWarningsAPI
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Initialize the dwd_weather_warnings coordinator."""
|
||||
super().__init__(
|
||||
hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL
|
||||
)
|
||||
|
||||
self.api = api
|
||||
self._device_tracker = None
|
||||
self._previous_position = None
|
||||
|
||||
async def async_config_entry_first_refresh(self) -> None:
|
||||
"""Perform first refresh."""
|
||||
if region_identifier := self.config_entry.data.get(CONF_REGION_IDENTIFIER):
|
||||
self.api = await self.hass.async_add_executor_job(
|
||||
DwdWeatherWarningsAPI, region_identifier
|
||||
)
|
||||
else:
|
||||
self._device_tracker = self.config_entry.data.get(
|
||||
CONF_REGION_DEVICE_TRACKER
|
||||
)
|
||||
|
||||
await super().async_config_entry_first_refresh()
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Get the latest data from the DWD Weather Warnings API."""
|
||||
await self.hass.async_add_executor_job(self.api.update)
|
||||
if self._device_tracker:
|
||||
try:
|
||||
position = get_position_data(self.hass, self._device_tracker)
|
||||
except (EntityNotFoundError, AttributeError) as err:
|
||||
raise UpdateFailed(f"Error fetching position: {repr(err)}") from err
|
||||
|
||||
distance = None
|
||||
if self._previous_position is not None:
|
||||
distance = location.distance(
|
||||
self._previous_position[0],
|
||||
self._previous_position[1],
|
||||
position[0],
|
||||
position[1],
|
||||
)
|
||||
|
||||
if distance is None or distance > 50:
|
||||
# Only create a new object on the first update
|
||||
# or when the distance to the previous position
|
||||
# changes by more than 50 meters (to take GPS
|
||||
# inaccuracy into account).
|
||||
self.api = await self.hass.async_add_executor_job(
|
||||
DwdWeatherWarningsAPI, position
|
||||
)
|
||||
else:
|
||||
# Otherwise update the API to check for new warnings.
|
||||
await self.hass.async_add_executor_job(self.api.update)
|
||||
|
||||
self._previous_position = position
|
||||
else:
|
||||
await self.hass.async_add_executor_job(self.api.update)
|
||||
|
@@ -0,0 +1,7 @@
|
||||
"""Exceptions for the dwd_weather_warnings integration."""
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
|
||||
class EntityNotFoundError(HomeAssistantError):
|
||||
"""When a referenced entity was not found."""
|
@@ -11,6 +11,8 @@ Wetterwarnungen (Stufe 1)
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -93,29 +95,27 @@ class DwdWeatherWarningsSensor(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
self.api = coordinator.api
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
def native_value(self) -> int | None:
|
||||
"""Return the state of the sensor."""
|
||||
if self.entity_description.key == CURRENT_WARNING_SENSOR:
|
||||
return self.api.current_warning_level
|
||||
return self.coordinator.api.current_warning_level
|
||||
|
||||
return self.api.expected_warning_level
|
||||
return self.coordinator.api.expected_warning_level
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the sensor."""
|
||||
data = {
|
||||
ATTR_REGION_NAME: self.api.warncell_name,
|
||||
ATTR_REGION_ID: self.api.warncell_id,
|
||||
ATTR_LAST_UPDATE: self.api.last_update,
|
||||
ATTR_REGION_NAME: self.coordinator.api.warncell_name,
|
||||
ATTR_REGION_ID: self.coordinator.api.warncell_id,
|
||||
ATTR_LAST_UPDATE: self.coordinator.api.last_update,
|
||||
}
|
||||
|
||||
if self.entity_description.key == CURRENT_WARNING_SENSOR:
|
||||
searched_warnings = self.api.current_warnings
|
||||
searched_warnings = self.coordinator.api.current_warnings
|
||||
else:
|
||||
searched_warnings = self.api.expected_warnings
|
||||
searched_warnings = self.coordinator.api.expected_warnings
|
||||
|
||||
data[ATTR_WARNING_COUNT] = len(searched_warnings)
|
||||
|
||||
@@ -142,4 +142,4 @@ class DwdWeatherWarningsSensor(
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Could the device be accessed during the last update call."""
|
||||
return self.api.data_valid
|
||||
return self.coordinator.api.data_valid
|
||||
|
@@ -2,17 +2,22 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "To identify the desired region, the warncell ID / name is required.",
|
||||
"description": "To identify the desired region, either the warncell ID / name or device tracker is required. The provided device tracker has to contain the attributes 'latitude' and 'longitude'.",
|
||||
"data": {
|
||||
"region_identifier": "Warncell ID or name"
|
||||
"region_identifier": "Warncell ID or name",
|
||||
"region_device_tracker": "Device tracker entity"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"invalid_identifier": "The specified region identifier is invalid."
|
||||
"no_identifier": "Either the region identifier or device tracker is required.",
|
||||
"ambiguous_identifier": "The region identifier and device tracker can not be specified together.",
|
||||
"invalid_identifier": "The specified region identifier / device tracker is invalid.",
|
||||
"entity_not_found": "The specified device tracker entity was not found.",
|
||||
"attribute_not_found": "The required `latitude` or `longitude` attribute was not found in the specified device tracker."
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "Warncell ID / name is already configured.",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"invalid_identifier": "[%key:component::dwd_weather_warnings::config::error::invalid_identifier%]"
|
||||
}
|
||||
},
|
||||
|
39
homeassistant/components/dwd_weather_warnings/util.py
Normal file
39
homeassistant/components/dwd_weather_warnings/util.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""Util functions for the dwd_weather_warnings integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .exceptions import EntityNotFoundError
|
||||
|
||||
|
||||
def get_position_data(
|
||||
hass: HomeAssistant, registry_id: str
|
||||
) -> tuple[float, float] | None:
|
||||
"""Extract longitude and latitude from a device tracker."""
|
||||
registry = er.async_get(hass)
|
||||
registry_entry = registry.async_get(registry_id)
|
||||
if registry_entry is None:
|
||||
raise EntityNotFoundError(f"Failed to find registry entry {registry_id}")
|
||||
|
||||
entity = hass.states.get(registry_entry.entity_id)
|
||||
if entity is None:
|
||||
raise EntityNotFoundError(f"Failed to find entity {registry_entry.entity_id}")
|
||||
|
||||
latitude = entity.attributes.get(ATTR_LATITUDE)
|
||||
if not latitude:
|
||||
raise AttributeError(
|
||||
f"Failed to find attribute '{ATTR_LATITUDE}' in {registry_entry.entity_id}",
|
||||
ATTR_LATITUDE,
|
||||
)
|
||||
|
||||
longitude = entity.attributes.get(ATTR_LONGITUDE)
|
||||
if not longitude:
|
||||
raise AttributeError(
|
||||
f"Failed to find attribute '{ATTR_LONGITUDE}' in {registry_entry.entity_id}",
|
||||
ATTR_LONGITUDE,
|
||||
)
|
||||
|
||||
return (latitude, longitude)
|
@@ -73,6 +73,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
# The legacy Ecobee notify.notify service is deprecated
|
||||
# was with HA Core 2024.5.0 and will be removed with HA core 2024.11.0
|
||||
hass.async_create_task(
|
||||
discovery.async_load_platform(
|
||||
hass,
|
||||
@@ -97,7 +99,7 @@ class EcobeeData:
|
||||
) -> None:
|
||||
"""Initialize the Ecobee data object."""
|
||||
self._hass = hass
|
||||
self._entry = entry
|
||||
self.entry = entry
|
||||
self.ecobee = Ecobee(
|
||||
config={ECOBEE_API_KEY: api_key, ECOBEE_REFRESH_TOKEN: refresh_token}
|
||||
)
|
||||
@@ -117,7 +119,7 @@ class EcobeeData:
|
||||
_LOGGER.debug("Refreshing ecobee tokens and updating config entry")
|
||||
if await self._hass.async_add_executor_job(self.ecobee.refresh_tokens):
|
||||
self._hass.config_entries.async_update_entry(
|
||||
self._entry,
|
||||
self.entry,
|
||||
data={
|
||||
CONF_API_KEY: self.ecobee.config[ECOBEE_API_KEY],
|
||||
CONF_REFRESH_TOKEN: self.ecobee.config[ECOBEE_REFRESH_TOKEN],
|
||||
|
@@ -12,7 +12,10 @@ from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
FAN_AUTO,
|
||||
FAN_ON,
|
||||
PRESET_AWAY,
|
||||
PRESET_HOME,
|
||||
PRESET_NONE,
|
||||
PRESET_SLEEP,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
@@ -60,9 +63,6 @@ PRESET_TEMPERATURE = "temp"
|
||||
PRESET_VACATION = "vacation"
|
||||
PRESET_HOLD_NEXT_TRANSITION = "next_transition"
|
||||
PRESET_HOLD_INDEFINITE = "indefinite"
|
||||
AWAY_MODE = "awayMode"
|
||||
PRESET_HOME = "home"
|
||||
PRESET_SLEEP = "sleep"
|
||||
HAS_HEAT_PUMP = "hasHeatPump"
|
||||
|
||||
DEFAULT_MIN_HUMIDITY = 15
|
||||
@@ -103,6 +103,13 @@ ECOBEE_HVAC_ACTION_TO_HASS = {
|
||||
"compWaterHeater": None,
|
||||
}
|
||||
|
||||
ECOBEE_TO_HASS_PRESET = {
|
||||
"Away": PRESET_AWAY,
|
||||
"Home": PRESET_HOME,
|
||||
"Sleep": PRESET_SLEEP,
|
||||
}
|
||||
HASS_TO_ECOBEE_PRESET = {v: k for k, v in ECOBEE_TO_HASS_PRESET.items()}
|
||||
|
||||
PRESET_TO_ECOBEE_HOLD = {
|
||||
PRESET_HOLD_NEXT_TRANSITION: "nextTransition",
|
||||
PRESET_HOLD_INDEFINITE: "indefinite",
|
||||
@@ -348,10 +355,6 @@ class Thermostat(ClimateEntity):
|
||||
self._attr_hvac_modes.insert(0, HVACMode.HEAT_COOL)
|
||||
self._attr_hvac_modes.append(HVACMode.OFF)
|
||||
|
||||
self._preset_modes = {
|
||||
comfort["climateRef"]: comfort["name"]
|
||||
for comfort in self.thermostat["program"]["climates"]
|
||||
}
|
||||
self.update_without_throttle = False
|
||||
|
||||
async def async_update(self) -> None:
|
||||
@@ -474,7 +477,7 @@ class Thermostat(ClimateEntity):
|
||||
return self.thermostat["runtime"]["desiredFanMode"]
|
||||
|
||||
@property
|
||||
def preset_mode(self):
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return current preset mode."""
|
||||
events = self.thermostat["events"]
|
||||
for event in events:
|
||||
@@ -487,8 +490,8 @@ class Thermostat(ClimateEntity):
|
||||
):
|
||||
return PRESET_AWAY_INDEFINITELY
|
||||
|
||||
if event["holdClimateRef"] in self._preset_modes:
|
||||
return self._preset_modes[event["holdClimateRef"]]
|
||||
if name := self.comfort_settings.get(event["holdClimateRef"]):
|
||||
return ECOBEE_TO_HASS_PRESET.get(name, name)
|
||||
|
||||
# Any hold not based on a climate is a temp hold
|
||||
return PRESET_TEMPERATURE
|
||||
@@ -499,7 +502,12 @@ class Thermostat(ClimateEntity):
|
||||
self.vacation = event["name"]
|
||||
return PRESET_VACATION
|
||||
|
||||
return self._preset_modes[self.thermostat["program"]["currentClimateRef"]]
|
||||
if name := self.comfort_settings.get(
|
||||
self.thermostat["program"]["currentClimateRef"]
|
||||
):
|
||||
return ECOBEE_TO_HASS_PRESET.get(name, name)
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
@@ -545,14 +553,14 @@ class Thermostat(ClimateEntity):
|
||||
return HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return device specific state attributes."""
|
||||
status = self.thermostat["equipmentStatus"]
|
||||
return {
|
||||
"fan": self.fan,
|
||||
"climate_mode": self._preset_modes[
|
||||
"climate_mode": self.comfort_settings.get(
|
||||
self.thermostat["program"]["currentClimateRef"]
|
||||
],
|
||||
),
|
||||
"equipment_running": status,
|
||||
"fan_min_on_time": self.settings["fanMinOnTime"],
|
||||
}
|
||||
@@ -577,6 +585,8 @@ class Thermostat(ClimateEntity):
|
||||
|
||||
def set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Activate a preset."""
|
||||
preset_mode = HASS_TO_ECOBEE_PRESET.get(preset_mode, preset_mode)
|
||||
|
||||
if preset_mode == self.preset_mode:
|
||||
return
|
||||
|
||||
@@ -605,25 +615,14 @@ class Thermostat(ClimateEntity):
|
||||
elif preset_mode == PRESET_NONE:
|
||||
self.data.ecobee.resume_program(self.thermostat_index)
|
||||
|
||||
elif preset_mode in self.preset_modes:
|
||||
climate_ref = None
|
||||
|
||||
for comfort in self.thermostat["program"]["climates"]:
|
||||
if comfort["name"] == preset_mode:
|
||||
climate_ref = comfort["climateRef"]
|
||||
else:
|
||||
for climate_ref, name in self.comfort_settings.items():
|
||||
if name == preset_mode:
|
||||
preset_mode = climate_ref
|
||||
break
|
||||
|
||||
if climate_ref is not None:
|
||||
self.data.ecobee.set_climate_hold(
|
||||
self.thermostat_index,
|
||||
climate_ref,
|
||||
self.hold_preference(),
|
||||
self.hold_hours(),
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning("Received unknown preset mode: %s", preset_mode)
|
||||
|
||||
else:
|
||||
self.data.ecobee.set_climate_hold(
|
||||
self.thermostat_index,
|
||||
preset_mode,
|
||||
@@ -632,11 +631,22 @@ class Thermostat(ClimateEntity):
|
||||
)
|
||||
|
||||
@property
|
||||
def preset_modes(self):
|
||||
def preset_modes(self) -> list[str] | None:
|
||||
"""Return available preset modes."""
|
||||
# Return presets provided by the ecobee API, and an indefinite away
|
||||
# preset which we handle separately in set_preset_mode().
|
||||
return [*self._preset_modes.values(), PRESET_AWAY_INDEFINITELY]
|
||||
return [
|
||||
ECOBEE_TO_HASS_PRESET.get(name, name)
|
||||
for name in self.comfort_settings.values()
|
||||
] + [PRESET_AWAY_INDEFINITELY]
|
||||
|
||||
@property
|
||||
def comfort_settings(self) -> dict[str, str]:
|
||||
"""Return ecobee API comfort settings."""
|
||||
return {
|
||||
comfort["climateRef"]: comfort["name"]
|
||||
for comfort in self.thermostat["program"]["climates"]
|
||||
}
|
||||
|
||||
def set_auto_temp_hold(self, heat_temp, cool_temp):
|
||||
"""Set temperature hold in auto mode."""
|
||||
|
@@ -46,6 +46,7 @@ PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.HUMIDIFIER,
|
||||
Platform.NOTIFY,
|
||||
Platform.NUMBER,
|
||||
Platform.SENSOR,
|
||||
Platform.WEATHER,
|
||||
|
@@ -3,6 +3,7 @@
|
||||
"name": "ecobee",
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"dependencies": ["http", "repairs"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecobee",
|
||||
"homekit": {
|
||||
"models": ["EB", "ecobee*"]
|
||||
|
@@ -2,11 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.notify import ATTR_TARGET, BaseNotificationService
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_TARGET,
|
||||
BaseNotificationService,
|
||||
NotifyEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import Ecobee, EcobeeData
|
||||
from .const import DOMAIN
|
||||
from .entity import EcobeeBaseEntity
|
||||
from .repairs import migrate_notify_issue
|
||||
|
||||
|
||||
def get_service(
|
||||
@@ -18,18 +30,25 @@ def get_service(
|
||||
if discovery_info is None:
|
||||
return None
|
||||
|
||||
data = hass.data[DOMAIN]
|
||||
data: EcobeeData = hass.data[DOMAIN]
|
||||
return EcobeeNotificationService(data.ecobee)
|
||||
|
||||
|
||||
class EcobeeNotificationService(BaseNotificationService):
|
||||
"""Implement the notification service for the Ecobee thermostat."""
|
||||
|
||||
def __init__(self, ecobee):
|
||||
def __init__(self, ecobee: Ecobee) -> None:
|
||||
"""Initialize the service."""
|
||||
self.ecobee = ecobee
|
||||
|
||||
def send_message(self, message="", **kwargs):
|
||||
async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message and raise issue."""
|
||||
migrate_notify_issue(self.hass)
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self.send_message, message, **kwargs)
|
||||
)
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message."""
|
||||
targets = kwargs.get(ATTR_TARGET)
|
||||
|
||||
@@ -39,3 +58,33 @@ class EcobeeNotificationService(BaseNotificationService):
|
||||
for target in targets:
|
||||
thermostat_index = int(target)
|
||||
self.ecobee.send_message(thermostat_index, message)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the ecobee thermostat."""
|
||||
data: EcobeeData = hass.data[DOMAIN]
|
||||
async_add_entities(
|
||||
EcobeeNotifyEntity(data, index) for index in range(len(data.ecobee.thermostats))
|
||||
)
|
||||
|
||||
|
||||
class EcobeeNotifyEntity(EcobeeBaseEntity, NotifyEntity):
|
||||
"""Implement the notification entity for the Ecobee thermostat."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, data: EcobeeData, thermostat_index: int) -> None:
|
||||
"""Initialize the thermostat."""
|
||||
super().__init__(data, thermostat_index)
|
||||
self._attr_unique_id = (
|
||||
f"{self.thermostat["identifier"]}_notify_{thermostat_index}"
|
||||
)
|
||||
|
||||
def send_message(self, message: str) -> None:
|
||||
"""Send a message."""
|
||||
self.data.ecobee.send_message(self.thermostat_index, message)
|
||||
|
37
homeassistant/components/ecobee/repairs.py
Normal file
37
homeassistant/components/ecobee/repairs.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Repairs support for Ecobee."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.components.repairs.issue_handler import ConfirmRepairFlow
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def migrate_notify_issue(hass: HomeAssistant) -> None:
|
||||
"""Ensure an issue is registered."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"migrate_notify",
|
||||
breaks_in_ha_version="2024.11.0",
|
||||
issue_domain=NOTIFY_DOMAIN,
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
translation_key="migrate_notify",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
)
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str | int | float | None] | None,
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
assert issue_id == "migrate_notify"
|
||||
return ConfirmRepairFlow()
|
@@ -163,5 +163,18 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"migrate_notify": {
|
||||
"title": "Migration of Ecobee notify service",
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "The Ecobee `notify` service has been migrated. A new `notify` entity per Thermostat is available now.\n\nUpdate any automations to use the new `notify.send_message` exposed by these new entities. When this is done, fix this issue and restart Home Assistant.",
|
||||
"title": "Disable legacy Ecobee notify service"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -71,7 +71,7 @@ async def _validate_input(
|
||||
if errors:
|
||||
return errors
|
||||
|
||||
device_id = get_client_device_id()
|
||||
device_id = get_client_device_id(hass, rest_url is not None)
|
||||
country = user_input[CONF_COUNTRY]
|
||||
rest_config = create_rest_config(
|
||||
aiohttp_client.async_get_clientsession(hass),
|
||||
|
@@ -12,8 +12,10 @@ CONF_OVERRIDE_MQTT_URL = "override_mqtt_url"
|
||||
CONF_VERIFY_MQTT_CERTIFICATE = "verify_mqtt_certificate"
|
||||
|
||||
SUPPORTED_LIFESPANS = (
|
||||
LifeSpan.BLADE,
|
||||
LifeSpan.BRUSH,
|
||||
LifeSpan.FILTER,
|
||||
LifeSpan.LENS_BRUSH,
|
||||
LifeSpan.SIDE_BRUSH,
|
||||
)
|
||||
|
||||
|
@@ -43,7 +43,8 @@ class EcovacsController:
|
||||
self._hass = hass
|
||||
self._devices: list[Device] = []
|
||||
self.legacy_devices: list[VacBot] = []
|
||||
self._device_id = get_client_device_id()
|
||||
rest_url = config.get(CONF_OVERRIDE_REST_URL)
|
||||
self._device_id = get_client_device_id(hass, rest_url is not None)
|
||||
country = config[CONF_COUNTRY]
|
||||
self._continent = get_continent(country)
|
||||
|
||||
@@ -52,7 +53,7 @@ class EcovacsController:
|
||||
aiohttp_client.async_get_clientsession(self._hass),
|
||||
device_id=self._device_id,
|
||||
alpha_2_country=country,
|
||||
override_rest_url=config.get(CONF_OVERRIDE_REST_URL),
|
||||
override_rest_url=rest_url,
|
||||
),
|
||||
config[CONF_USERNAME],
|
||||
md5(config[CONF_PASSWORD]),
|
||||
|
@@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from .const import DOMAIN
|
||||
from .controller import EcovacsController
|
||||
from .entity import EcovacsEntity
|
||||
from .util import get_name_key
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -54,10 +55,7 @@ class EcovacsLastJobEventEntity(
|
||||
# we trigger only on job done
|
||||
return
|
||||
|
||||
event_type = event.status.name.lower()
|
||||
if event.status == CleanJobStatus.MANUAL_STOPPED:
|
||||
event_type = "manually_stopped"
|
||||
|
||||
event_type = get_name_key(event.status)
|
||||
self._trigger_event(event_type)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
@@ -12,12 +12,18 @@
|
||||
"relocate": {
|
||||
"default": "mdi:map-marker-question"
|
||||
},
|
||||
"reset_lifespan_blade": {
|
||||
"default": "mdi:saw-blade"
|
||||
},
|
||||
"reset_lifespan_brush": {
|
||||
"default": "mdi:broom"
|
||||
},
|
||||
"reset_lifespan_filter": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"reset_lifespan_lens_brush": {
|
||||
"default": "mdi:broom"
|
||||
},
|
||||
"reset_lifespan_side_brush": {
|
||||
"default": "mdi:broom"
|
||||
}
|
||||
@@ -42,12 +48,18 @@
|
||||
"error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"lifespan_blade": {
|
||||
"default": "mdi:saw-blade"
|
||||
},
|
||||
"lifespan_brush": {
|
||||
"default": "mdi:broom"
|
||||
},
|
||||
"lifespan_filter": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"lifespan_lens_brush": {
|
||||
"default": "mdi:broom"
|
||||
},
|
||||
"lifespan_side_brush": {
|
||||
"default": "mdi:broom"
|
||||
},
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.9", "deebot-client==6.0.2"]
|
||||
"requirements": ["py-sucks==0.9.9", "deebot-client==7.1.0"]
|
||||
}
|
||||
|
@@ -22,7 +22,7 @@ from .entity import (
|
||||
EcovacsDescriptionEntity,
|
||||
EventT,
|
||||
)
|
||||
from .util import get_supported_entitites
|
||||
from .util import get_name_key, get_supported_entitites
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
@@ -41,8 +41,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
EcovacsSelectEntityDescription[WaterInfoEvent](
|
||||
device_capabilities=VacuumCapabilities,
|
||||
capability_fn=lambda caps: caps.water,
|
||||
current_option_fn=lambda e: e.amount.display_name,
|
||||
options_fn=lambda water: [amount.display_name for amount in water.types],
|
||||
current_option_fn=lambda e: get_name_key(e.amount),
|
||||
options_fn=lambda water: [get_name_key(amount) for amount in water.types],
|
||||
key="water_amount",
|
||||
translation_key="water_amount",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
@@ -50,8 +50,8 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
EcovacsSelectEntityDescription[WorkModeEvent](
|
||||
device_capabilities=VacuumCapabilities,
|
||||
capability_fn=lambda caps: caps.clean.work_mode,
|
||||
current_option_fn=lambda e: e.mode.display_name,
|
||||
options_fn=lambda cap: [mode.display_name for mode in cap.types],
|
||||
current_option_fn=lambda e: get_name_key(e.mode),
|
||||
options_fn=lambda cap: [get_name_key(mode) for mode in cap.types],
|
||||
key="work_mode",
|
||||
translation_key="work_mode",
|
||||
entity_registry_enabled_default=False,
|
||||
|
@@ -46,12 +46,18 @@
|
||||
"relocate": {
|
||||
"name": "Relocate"
|
||||
},
|
||||
"reset_lifespan_blade": {
|
||||
"name": "Reset blade lifespan"
|
||||
},
|
||||
"reset_lifespan_brush": {
|
||||
"name": "Reset main brush lifespan"
|
||||
},
|
||||
"reset_lifespan_filter": {
|
||||
"name": "Reset filter lifespan"
|
||||
},
|
||||
"reset_lifespan_lens_brush": {
|
||||
"name": "Reset lens brush lifespan"
|
||||
},
|
||||
"reset_lifespan_side_brush": {
|
||||
"name": "Reset side brushes lifespan"
|
||||
}
|
||||
@@ -92,12 +98,18 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"lifespan_blade": {
|
||||
"name": "Blade lifespan"
|
||||
},
|
||||
"lifespan_brush": {
|
||||
"name": "Main brush lifespan"
|
||||
},
|
||||
"lifespan_filter": {
|
||||
"name": "Filter lifespan"
|
||||
},
|
||||
"lifespan_lens_brush": {
|
||||
"name": "Lens brush lifespan"
|
||||
},
|
||||
"lifespan_side_brush": {
|
||||
"name": "Side brushes lifespan"
|
||||
},
|
||||
|
@@ -2,12 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
import random
|
||||
import string
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from deebot_client.capabilities import Capabilities
|
||||
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .entity import (
|
||||
EcovacsCapabilityEntityDescription,
|
||||
EcovacsDescriptionEntity,
|
||||
@@ -18,8 +22,11 @@ if TYPE_CHECKING:
|
||||
from .controller import EcovacsController
|
||||
|
||||
|
||||
def get_client_device_id() -> str:
|
||||
def get_client_device_id(hass: HomeAssistant, self_hosted: bool) -> str:
|
||||
"""Get client device id."""
|
||||
if self_hosted:
|
||||
return f"HA-{slugify(hass.config.location_name)}"
|
||||
|
||||
return "".join(
|
||||
random.choice(string.ascii_uppercase + string.digits) for _ in range(8)
|
||||
)
|
||||
@@ -38,3 +45,9 @@ def get_supported_entitites(
|
||||
if isinstance(device.capabilities, description.device_capabilities)
|
||||
if (capability := description.capability_fn(device.capabilities))
|
||||
]
|
||||
|
||||
|
||||
@callback
|
||||
def get_name_key(enum: Enum) -> str:
|
||||
"""Return the lower case name of the enum."""
|
||||
return enum.name.lower()
|
||||
|
@@ -33,6 +33,7 @@ from homeassistant.util import slugify
|
||||
from .const import DOMAIN
|
||||
from .controller import EcovacsController
|
||||
from .entity import EcovacsEntity
|
||||
from .util import get_name_key
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -242,7 +243,7 @@ class EcovacsVacuum(
|
||||
self._rooms: list[Room] = []
|
||||
|
||||
self._attr_fan_speed_list = [
|
||||
level.display_name for level in capabilities.fan_speed.types
|
||||
get_name_key(level) for level in capabilities.fan_speed.types
|
||||
]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
@@ -254,7 +255,7 @@ class EcovacsVacuum(
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def on_fan_speed(event: FanSpeedEvent) -> None:
|
||||
self._attr_fan_speed = event.speed.display_name
|
||||
self._attr_fan_speed = get_name_key(event.speed)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def on_rooms(event: RoomsEvent) -> None:
|
||||
|
@@ -86,8 +86,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
continue
|
||||
|
||||
if payload_dict:
|
||||
payload = "{%s}" % ",".join(
|
||||
f"{key}:{val}" for key, val in payload_dict.items()
|
||||
payload = "{{{}}}".format(
|
||||
",".join(f"{key}:{val}" for key, val in payload_dict.items())
|
||||
)
|
||||
|
||||
send_data(
|
||||
|
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Mapping
|
||||
import copy
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
@@ -167,8 +167,7 @@ class SensorManager:
|
||||
if adapter.flow_type is None:
|
||||
self._process_sensor_data(
|
||||
adapter,
|
||||
# Opting out of the type complexity because can't get it to work
|
||||
energy_source, # type: ignore[arg-type]
|
||||
energy_source,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
@@ -177,8 +176,7 @@ class SensorManager:
|
||||
for flow in energy_source[adapter.flow_type]: # type: ignore[typeddict-item]
|
||||
self._process_sensor_data(
|
||||
adapter,
|
||||
# Opting out of the type complexity because can't get it to work
|
||||
flow, # type: ignore[arg-type]
|
||||
flow,
|
||||
to_add,
|
||||
to_remove,
|
||||
)
|
||||
@@ -189,7 +187,7 @@ class SensorManager:
|
||||
def _process_sensor_data(
|
||||
self,
|
||||
adapter: SourceAdapter,
|
||||
config: dict,
|
||||
config: Mapping[str, Any],
|
||||
to_add: list[EnergyCostSensor],
|
||||
to_remove: dict[tuple[str, str | None, str], EnergyCostSensor],
|
||||
) -> None:
|
||||
@@ -241,7 +239,7 @@ class EnergyCostSensor(SensorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
adapter: SourceAdapter,
|
||||
config: dict,
|
||||
config: Mapping[str, Any],
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__()
|
||||
@@ -456,7 +454,7 @@ class EnergyCostSensor(SensorEntity):
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
@callback
|
||||
def update_config(self, config: dict) -> None:
|
||||
def update_config(self, config: Mapping[str, Any]) -> None:
|
||||
"""Update the config."""
|
||||
self._config = config
|
||||
|
||||
|
@@ -31,7 +31,7 @@ from .data import (
|
||||
EnergyPreferencesUpdate,
|
||||
async_get_manager,
|
||||
)
|
||||
from .types import EnergyPlatform, GetSolarForecastType
|
||||
from .types import EnergyPlatform, GetSolarForecastType, SolarForecastType
|
||||
from .validate import async_validate
|
||||
|
||||
EnergyWebSocketCommandHandler = Callable[
|
||||
@@ -203,19 +203,18 @@ async def ws_solar_forecast(
|
||||
for source in manager.data["energy_sources"]:
|
||||
if (
|
||||
source["type"] != "solar"
|
||||
or source.get("config_entry_solar_forecast") is None
|
||||
or (solar_forecast := source.get("config_entry_solar_forecast")) is None
|
||||
):
|
||||
continue
|
||||
|
||||
# typing is not catching the above guard for config_entry_solar_forecast being none
|
||||
for config_entry in source["config_entry_solar_forecast"]: # type: ignore[union-attr]
|
||||
config_entries[config_entry] = None
|
||||
for entry in solar_forecast:
|
||||
config_entries[entry] = None
|
||||
|
||||
if not config_entries:
|
||||
connection.send_result(msg["id"], {})
|
||||
return
|
||||
|
||||
forecasts = {}
|
||||
forecasts: dict[str, SolarForecastType] = {}
|
||||
|
||||
forecast_platforms = await async_get_energy_platforms(hass)
|
||||
|
||||
|
@@ -46,6 +46,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
coordinator: EnphaseUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
coordinator.async_cancel_token_refresh()
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
@@ -89,6 +89,14 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by zeroconf discovery."""
|
||||
if _LOGGER.isEnabledFor(logging.DEBUG):
|
||||
current_hosts = self._async_current_hosts()
|
||||
_LOGGER.debug(
|
||||
"Zeroconf ip %s processing %s, current hosts: %s",
|
||||
discovery_info.ip_address.version,
|
||||
discovery_info.host,
|
||||
current_hosts,
|
||||
)
|
||||
if discovery_info.ip_address.version != 4:
|
||||
return self.async_abort(reason="not_ipv4_address")
|
||||
serial = discovery_info.properties["serialnum"]
|
||||
@@ -96,17 +104,27 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self.async_set_unique_id(serial)
|
||||
self.ip_address = discovery_info.host
|
||||
self._abort_if_unique_id_configured({CONF_HOST: self.ip_address})
|
||||
_LOGGER.debug(
|
||||
"Zeroconf ip %s, fw %s, no existing entry with serial %s",
|
||||
self.ip_address,
|
||||
self.protovers,
|
||||
serial,
|
||||
)
|
||||
for entry in self._async_current_entries(include_ignore=False):
|
||||
if (
|
||||
entry.unique_id is None
|
||||
and CONF_HOST in entry.data
|
||||
and entry.data[CONF_HOST] == self.ip_address
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Zeroconf update envoy with this ip and blank serial in unique_id",
|
||||
)
|
||||
title = f"{ENVOY} {serial}" if entry.title == ENVOY else ENVOY
|
||||
return self.async_update_reload_and_abort(
|
||||
entry, title=title, unique_id=serial, reason="already_configured"
|
||||
)
|
||||
|
||||
_LOGGER.debug("Zeroconf ip %s to step user", self.ip_address)
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_reauth(
|
||||
|
@@ -83,9 +83,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
def _async_mark_setup_complete(self) -> None:
|
||||
"""Mark setup as complete and setup token refresh if needed."""
|
||||
self._setup_complete = True
|
||||
if self._cancel_token_refresh:
|
||||
self._cancel_token_refresh()
|
||||
self._cancel_token_refresh = None
|
||||
self.async_cancel_token_refresh()
|
||||
if not isinstance(self.envoy.auth, EnvoyTokenAuth):
|
||||
return
|
||||
self._cancel_token_refresh = async_track_time_interval(
|
||||
@@ -159,3 +157,10 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
return envoy_data.raw
|
||||
|
||||
raise RuntimeError("Unreachable code in _async_update_data") # pragma: no cover
|
||||
|
||||
@callback
|
||||
def async_cancel_token_refresh(self) -> None:
|
||||
"""Cancel token refresh."""
|
||||
if self._cancel_token_refresh:
|
||||
self._cancel_token_refresh()
|
||||
self._cancel_token_refresh = None
|
||||
|
35
homeassistant/components/epic_games_store/__init__.py
Normal file
35
homeassistant/components/epic_games_store/__init__.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""The Epic Games Store integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import EGSCalendarUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.CALENDAR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Epic Games Store from a config entry."""
|
||||
|
||||
coordinator = EGSCalendarUpdateCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
97
homeassistant/components/epic_games_store/calendar.py
Normal file
97
homeassistant/components/epic_games_store/calendar.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""Calendar platform for a Epic Games Store."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import namedtuple
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, CalendarType
|
||||
from .coordinator import EGSCalendarUpdateCoordinator
|
||||
|
||||
DateRange = namedtuple("DateRange", ["start", "end"])
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the local calendar platform."""
|
||||
coordinator: EGSCalendarUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
entities = [
|
||||
EGSCalendar(coordinator, entry.entry_id, CalendarType.FREE),
|
||||
EGSCalendar(coordinator, entry.entry_id, CalendarType.DISCOUNT),
|
||||
]
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class EGSCalendar(CoordinatorEntity[EGSCalendarUpdateCoordinator], CalendarEntity):
    """A calendar entity by Epic Games Store."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: EGSCalendarUpdateCoordinator,
        config_entry_id: str,
        cal_type: CalendarType,
    ) -> None:
        """Initialize EGSCalendar."""
        super().__init__(coordinator)
        # Which listing this entity exposes ("free" or "discount").
        self._cal_type = cal_type
        self._attr_translation_key = f"{cal_type}_games"
        self._attr_unique_id = f"{config_entry_id}-{cal_type}"
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, config_entry_id)},
            manufacturer="Epic Games Store",
            name="Epic Games Store",
        )

    @property
    def event(self) -> CalendarEvent | None:
        """Return the next upcoming event."""
        # Coordinator data is sorted by discount start, so the first entry
        # (if any) is the next event.
        games = self.coordinator.data[self._cal_type]
        if not games:
            return None
        return _get_calendar_event(games[0])

    async def async_get_events(
        self, hass: HomeAssistant, start_date: datetime, end_date: datetime
    ) -> list[CalendarEvent]:
        """Get all events in a specific time frame."""
        query_range = DateRange(start=start_date, end=end_date)
        return [
            _get_calendar_event(game)
            for game in self.coordinator.data[self._cal_type]
            if _are_date_range_overlapping(
                DateRange(
                    start=game["discount_start_at"], end=game["discount_end_at"]
                ),
                query_range,
            )
        ]
|
||||
|
||||
|
||||
def _get_calendar_event(event: dict[str, Any]) -> CalendarEvent:
    """Return a CalendarEvent from an API event."""
    # Combine the game description with its store URL in the event body.
    description = f"{event['description']}\n\n{event['url']}"
    return CalendarEvent(
        summary=event["title"],
        start=event["discount_start_at"],
        end=event["discount_end_at"],
        description=description,
    )
|
||||
|
||||
|
||||
def _are_date_range_overlapping(range1: DateRange, range2: DateRange) -> bool:
|
||||
"""Return a CalendarEvent from an API event."""
|
||||
latest_start = max(range1.start, range2.start)
|
||||
earliest_end = min(range1.end, range2.end)
|
||||
delta = (earliest_end - latest_start).days + 1
|
||||
overlap = max(0, delta)
|
||||
return overlap > 0
|
96
homeassistant/components/epic_games_store/config_flow.py
Normal file
96
homeassistant/components/epic_games_store/config_flow.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Config flow for Epic Games Store integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from epicstore_api import EpicGamesStoreAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.config_entries import ConfigFlowResult
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.selector import (
|
||||
CountrySelector,
|
||||
LanguageSelector,
|
||||
LanguageSelectorConfig,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, SUPPORTED_LANGUAGES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# User-step schema: language limited to the store's supported locales,
# country chosen via the standard country selector.
STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_LANGUAGE): LanguageSelector(
            LanguageSelectorConfig(languages=SUPPORTED_LANGUAGES)
        ),
        vol.Required(CONF_COUNTRY): CountrySelector(),
    }
)
|
||||
|
||||
|
||||
def get_default_language(hass: HomeAssistant) -> str | None:
    """Get default language code based on Home Assistant config.

    Prefers the full "<language>-<country>" locale, then the bare language
    code; returns None when neither is supported by the store.
    """
    candidates = (
        f"{hass.config.language}-{hass.config.country}",
        hass.config.language,
    )
    for candidate in candidates:
        if candidate in SUPPORTED_LANGUAGES:
            return candidate
    return None
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> None:
    """Validate the user input allows us to connect.

    Fetches the free-games feed with the chosen language/country and raises
    if the response contains no game elements. API-reported errors are
    logged but do not fail validation by themselves.
    """
    api = EpicGamesStoreAPI(user_input[CONF_LANGUAGE], user_input[CONF_COUNTRY])
    data = await hass.async_add_executor_job(api.get_free_games)

    if data.get("errors"):
        _LOGGER.warning(data["errors"])

    # Explicit check instead of `assert`: assertions are stripped when Python
    # runs with -O, which would silently skip this validation. The config
    # flow catches any Exception, so ValueError is handled the same way the
    # previous AssertionError was.
    if not data["data"]["Catalog"]["searchStore"]["elements"]:
        raise ValueError("Epic Games Store API returned no free games")
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Epic Games Store."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        # Pre-fill the form with either the user's previous answers or
        # defaults derived from the Home Assistant configuration.
        suggested_values = user_input or {
            CONF_LANGUAGE: get_default_language(self.hass),
            CONF_COUNTRY: self.hass.config.country,
        }
        data_schema = self.add_suggested_values_to_schema(
            STEP_USER_DATA_SCHEMA, suggested_values
        )
        if user_input is None:
            return self.async_show_form(step_id="user", data_schema=data_schema)

        language = user_input[CONF_LANGUAGE]
        country = user_input[CONF_COUNTRY]

        # One entry per language/country combination.
        await self.async_set_unique_id(f"freegames-{language}-{country}")
        self._abort_if_unique_id_configured()

        errors: dict[str, str] = {}
        try:
            await validate_input(self.hass, user_input)
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            return self.async_create_entry(
                title=f"Epic Games Store - Free Games ({language}-{country})",
                data=user_input,
            )

        # Validation failed: show the form again with the error attached.
        return self.async_show_form(
            step_id="user", data_schema=data_schema, errors=errors
        )
|
31
homeassistant/components/epic_games_store/const.py
Normal file
31
homeassistant/components/epic_games_store/const.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Constants for the Epic Games Store integration."""
|
||||
|
||||
from enum import StrEnum
|
||||
|
||||
DOMAIN = "epic_games_store"

# Locale codes offered in the config flow's language selector and passed to
# the Epic Games Store API client.
SUPPORTED_LANGUAGES = [
    "ar",
    "de",
    "en-US",
    "es-ES",
    "es-MX",
    "fr",
    "it",
    "ja",
    "ko",
    "pl",
    "pt-BR",
    "ru",
    "th",
    "tr",
    "zh-CN",
    "zh-Hant",
]
|
||||
|
||||
|
||||
class CalendarType(StrEnum):
|
||||
"""Calendar types."""
|
||||
|
||||
FREE = "free"
|
||||
DISCOUNT = "discount"
|
81
homeassistant/components/epic_games_store/coordinator.py
Normal file
81
homeassistant/components/epic_games_store/coordinator.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""The Epic Games Store integration data coordinator."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from epicstore_api import EpicGamesStoreAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN, CalendarType
|
||||
from .helper import format_game_data
|
||||
|
||||
# Refresh interval for the coordinator; the store's free/discount listings
# rotate slowly, so one fetch per day suffices.
SCAN_INTERVAL = timedelta(days=1)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EGSCalendarUpdateCoordinator(
    DataUpdateCoordinator[dict[str, list[dict[str, Any]]]]
):
    """Class to manage fetching data from the Epic Game Store."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Initialize."""
        language = entry.data[CONF_LANGUAGE]
        self._api = EpicGamesStoreAPI(language, entry.data[CONF_COUNTRY])
        # Kept so raw API payloads can be localized in _async_update_data.
        self.language = language

        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> dict[str, list[dict[str, Any]]]:
        """Update data via library."""
        # The API client is synchronous, so run it in the executor.
        raw_data = await self.hass.async_add_executor_job(self._api.get_free_games)
        _LOGGER.debug(raw_data)
        elements = raw_data["data"]["Catalog"]["searchStore"]["elements"]

        return_data: dict[str, list[dict[str, Any]]] = {
            CalendarType.DISCOUNT: [],
            CalendarType.FREE: [],
        }
        for element in elements:
            promotions = element.get("promotions")
            # Keep only games with a current or an upcoming promotional offer.
            if not promotions or not (
                promotions["promotionalOffers"]
                or promotions["upcomingPromotionalOffers"]
            ):
                continue
            game = format_game_data(element, self.language)
            if game["discount_type"]:
                return_data[game["discount_type"]].append(game)

        # Present each calendar's games in chronological order.
        for cal_type in (CalendarType.DISCOUNT, CalendarType.FREE):
            return_data[cal_type].sort(key=lambda game: game["discount_start_at"])

        _LOGGER.debug(return_data)
        return return_data
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user