Merge branch 'dev_target_triggers_conditions' into light_target_condition
.github/workflows/builder.yml (vendored, 14 changed lines)

@@ -27,7 +27,7 @@ jobs:
      publish: ${{ steps.version.outputs.publish }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
        with:
          fetch-depth: 0

@@ -90,7 +90,7 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
@@ -242,7 +242,7 @@ jobs:
          - green
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Set build additional args
        run: |
@@ -279,7 +279,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Initialize git
        uses: home-assistant/actions/helpers/git-init@master
@@ -321,7 +321,7 @@ jobs:
        registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Install Cosign
        uses: sigstore/cosign-installer@v3.9.2
@@ -454,7 +454,7 @@ jobs:
    if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5.6.0
@@ -499,7 +499,7 @@ jobs:
      HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
    steps:
      - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Login to GitHub Container Registry
        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0

.github/workflows/ci.yaml (vendored, 94 changed lines)

@@ -37,7 +37,7 @@ on:
        type: boolean

env:
- CACHE_VERSION: 4
+ CACHE_VERSION: 7
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.9"
@@ -94,7 +94,7 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: |
@@ -246,7 +246,7 @@ jobs:
      - info
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -255,7 +255,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.3
+       uses: actions/cache@v4.2.4
        with:
          path: venv
          key: >-
@@ -271,7 +271,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache@v4.2.3
+       uses: actions/cache@v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -292,7 +292,7 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5.6.0
        id: python
@@ -301,7 +301,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -310,7 +310,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -332,7 +332,7 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5.6.0
        id: python
@@ -341,7 +341,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -350,7 +350,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -372,7 +372,7 @@ jobs:
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5.6.0
        id: python
@@ -381,7 +381,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -390,7 +390,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -462,7 +462,7 @@ jobs:
      - script/hassfest/docker/Dockerfile
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,7 +481,7 @@ jobs:
        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -497,7 +497,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.3
+       uses: actions/cache@v4.2.4
        with:
          path: venv
          key: >-
@@ -505,7 +505,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-       uses: actions/cache@v4.2.3
+       uses: actions/cache@v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -584,7 +584,7 @@ jobs:
          sudo apt-get -y install \
            libturbojpeg
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -593,7 +593,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -617,7 +617,7 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -626,7 +626,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -651,9 +651,9 @@ jobs:
      && github.event_name == 'pull_request'
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Dependency review
-       uses: actions/dependency-review-action@v4.7.1
+       uses: actions/dependency-review-action@v4.7.2
        with:
          license-check: false # We use our own license audit checks

@@ -674,7 +674,7 @@ jobs:
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -683,7 +683,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -717,7 +717,7 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -726,7 +726,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -764,7 +764,7 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -773,7 +773,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -809,7 +809,7 @@ jobs:
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -825,7 +825,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -833,7 +833,7 @@ jobs:
          ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-       uses: actions/cache@v4.2.3
+       uses: actions/cache@v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -886,7 +886,7 @@ jobs:
            libturbojpeg \
            libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -895,7 +895,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -947,7 +947,7 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -956,7 +956,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1080,7 +1080,7 @@ jobs:
            libmariadb-dev-compat \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -1089,7 +1089,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1222,7 +1222,7 @@ jobs:
          sudo apt-get -y install \
            postgresql-server-dev-14
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -1231,7 +1231,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1334,14 +1334,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-       uses: codecov/codecov-action@v5.4.3
+       uses: codecov/codecov-action@v5.5.0
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1381,7 +1381,7 @@ jobs:
            libgammu-dev \
            libxml2-utils
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.6.0
@@ -1390,7 +1390,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.3
+       uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1484,14 +1484,14 @@ jobs:
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v5.0.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-       uses: codecov/codecov-action@v5.4.3
+       uses: codecov/codecov-action@v5.5.0
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/codeql.yml (vendored, 6 changed lines)

@@ -21,14 +21,14 @@ jobs:

    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v3.29.5
+       uses: github/codeql-action/init@v3.29.11
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v3.29.5
+       uses: github/codeql-action/analyze@v3.29.11
        with:
          category: "/language:python"

(workflow file name not preserved in the source)

@@ -231,7 +231,7 @@ jobs:
      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-       uses: actions/ai-inference@v1.2.8
+       uses: actions/ai-inference@v2.0.0
        with:
          model: openai/gpt-4o
          system-prompt: |

(workflow file name not preserved in the source)

@@ -57,7 +57,7 @@ jobs:
      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
-       uses: actions/ai-inference@v1.2.8
+       uses: actions/ai-inference@v2.0.0
        with:
          model: openai/gpt-4o-mini
          system-prompt: |

.github/workflows/restrict-task-creation.yml (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ jobs:
  check-authorization:
    runs-on: ubuntu-latest
    # Only run if this is a Task issue type (from the issue form)
-   if: github.event.issue.issue_type == 'Task'
+   if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@v7

.github/workflows/translations.yml (vendored, 2 changed lines)

@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5.6.0

.github/workflows/wheels.yml (vendored, 6 changed lines)

@@ -32,7 +32,7 @@ jobs:
      architectures: ${{ steps.info.outputs.architectures }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
@@ -135,7 +135,7 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Download env_file
        uses: actions/download-artifact@v5.0.0
@@ -184,7 +184,7 @@ jobs:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v4.2.2
+       uses: actions/checkout@v5.0.0

      - name: Download env_file
        uses: actions/download-artifact@v5.0.0

.pre-commit-config.yaml

@@ -18,7 +18,7 @@ repos:
        exclude_types: [csv, json, html]
        exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v5.0.0
+   rev: v6.0.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]

.strict-typing

@@ -310,7 +310,6 @@ homeassistant.components.letpot.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
-homeassistant.components.linear_garage_door.*
homeassistant.components.linkplay.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
@@ -467,6 +466,7 @@ homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
+homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*

CODEOWNERS (generated, 16 changed lines)

@@ -156,8 +156,8 @@ build.json @home-assistant/supervisor
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
-/homeassistant/components/asuswrt/ @kennedyshead @ollo69
-/tests/components/asuswrt/ @kennedyshead @ollo69
+/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
+/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/homeassistant/components/atag/ @MatsNL
/tests/components/atag/ @MatsNL
/homeassistant/components/aten_pe/ @mtdcr
@@ -422,6 +422,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/emby/ @mezz64
/homeassistant/components/emoncms/ @borpin @alexandrecuer
/tests/components/emoncms/ @borpin @alexandrecuer
+/homeassistant/components/emoncms_history/ @alexandrecuer
+/tests/components/emoncms_history/ @alexandrecuer
/homeassistant/components/emonitor/ @bdraco
/tests/components/emonitor/ @bdraco
/homeassistant/components/emulated_hue/ @bdraco @Tho85
@@ -438,8 +440,8 @@ build.json @home-assistant/supervisor
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
-/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
-/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
+/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
@@ -862,8 +864,6 @@ build.json @home-assistant/supervisor
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core
-/homeassistant/components/linear_garage_door/ @IceBotYT
-/tests/components/linear_garage_door/ @IceBotYT
/homeassistant/components/linkplay/ @Velleman
/tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff
@@ -1417,6 +1417,8 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
+/homeassistant/components/sleep_as_android/ @tr4nt0r
+/tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
@@ -1599,6 +1601,8 @@ build.json @home-assistant/supervisor
/tests/components/todo/ @home-assistant/core
/homeassistant/components/todoist/ @boralyl
/tests/components/todoist/ @boralyl
+/homeassistant/components/togrill/ @elupus
+/tests/components/togrill/ @elupus
/homeassistant/components/tolo/ @MatthiasLohr
/tests/components/tolo/ @MatthiasLohr
/homeassistant/components/tomorrowio/ @raman325 @lymanepp

CONTRIBUTING.md

@@ -14,5 +14,8 @@ Still interested? Then you should take a peek at the [developer documentation](h

## Feature suggestions

-If you want to suggest a new feature for Home Assistant (e.g., new integrations), please open a thread in our [Community Forum: Feature Requests](https://community.home-assistant.io/c/feature-requests).
-We use [GitHub for tracking issues](https://github.com/home-assistant/core/issues), not for tracking feature requests.
+If you want to suggest a new feature for Home Assistant (e.g. new integrations), please [start a discussion](https://github.com/orgs/home-assistant/discussions) on GitHub.
+
+## Issue Tracker
+
+If you want to report an issue, please [create an issue](https://github.com/home-assistant/core/issues) on GitHub.

Dockerfile (generated, 2 changed lines)

@@ -31,7 +31,7 @@ RUN \
    && go2rtc --version

# Install uv
-RUN pip3 install uv==0.7.1
+RUN pip3 install uv==0.8.9

WORKDIR /usr/src

(strings.json, integration name not preserved in the source)

@@ -61,7 +61,7 @@
      "display_pm_standard": {
        "name": "Display PM standard",
        "state": {
-         "ugm3": "µg/m³",
+         "ugm3": "μg/m³",
          "us_aqi": "US AQI"
        }
      },

homeassistant/components/airos/__init__.py

@@ -10,7 +10,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

-_PLATFORMS: list[Platform] = [Platform.SENSOR]
+_PLATFORMS: list[Platform] = [
+    Platform.BINARY_SENSOR,
+    Platform.SENSOR,
+]


async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:

homeassistant/components/airos/binary_sensor.py (new file, 106 lines)

@@ -0,0 +1,106 @@
"""AirOS Binary Sensor component for Home Assistant."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
from .entity import AirOSEntity

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describe an AirOS binary sensor."""

    value_fn: Callable[[AirOSData], bool]


BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
    AirOSBinarySensorEntityDescription(
        key="portfw",
        translation_key="port_forwarding",
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.portfw,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp_client",
        translation_key="dhcp_client",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcpc,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp_server",
        translation_key="dhcp_server",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcpd,
        entity_registry_enabled_default=False,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp6_server",
        translation_key="dhcp6_server",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcp6d_stateful,
        entity_registry_enabled_default=False,
    ),
    AirOSBinarySensorEntityDescription(
        key="pppoe",
        translation_key="pppoe",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.pppoe,
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirOSConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS binary sensors from a config entry."""
    coordinator = config_entry.runtime_data

    async_add_entities(
        AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
    )


class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
    """Representation of a binary sensor."""

    entity_description: AirOSBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: AirOSDataUpdateCoordinator,
        description: AirOSBinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"

    @property
    def is_on(self) -> bool:
        """Return the state of the binary sensor."""
        return self.entity_description.value_fn(self.coordinator.data)
homeassistant/components/airos/manifest.json

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airos",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
- "requirements": ["airos==0.2.4"]
+ "requirements": ["airos==0.4.3"]
}

homeassistant/components/airos/quality_scale.yaml

@@ -54,9 +54,7 @@ rules:
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
- entity-disabled-by-default:
-   status: todo
-   comment: prepared binary_sensors will provide this
+ entity-disabled-by-default: done
  entity-translations: done
  exception-translations: done
  icon-translations:

homeassistant/components/airos/sensor.py

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
import logging

-from airos.data import NetRole, WirelessMode
+from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole

from homeassistant.components.sensor import (
    SensorDeviceClass,
@@ -19,6 +19,8 @@ from homeassistant.const import (
    SIGNAL_STRENGTH_DECIBELS,
    UnitOfDataRate,
    UnitOfFrequency,
+   UnitOfLength,
+   UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -29,8 +31,11 @@ from .entity import AirOSEntity

_LOGGER = logging.getLogger(__name__)

-WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
NETROLE_OPTIONS = [mode.value for mode in NetRole]
+WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
+WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
+
+PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
@@ -46,6 +51,7 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
        translation_key="host_cpuload",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
+       suggested_display_precision=1,
        value_fn=lambda data: data.host.cpuload,
        entity_registry_enabled_default=False,
    ),
@@ -83,6 +89,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
+       suggested_display_precision=0,
+       suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.throughput.tx,
    ),
    AirOSSensorEntityDescription(
@@ -91,6 +99,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
+       suggested_display_precision=0,
+       suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.throughput.rx,
    ),
    AirOSSensorEntityDescription(
@@ -99,6 +109,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
+       suggested_display_precision=0,
+       suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.polling.dl_capacity,
    ),
    AirOSSensorEntityDescription(
@@ -107,8 +119,45 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
+       suggested_display_precision=0,
+       suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.polling.ul_capacity,
    ),
+   AirOSSensorEntityDescription(
+       key="host_uptime",
+       translation_key="host_uptime",
+       native_unit_of_measurement=UnitOfTime.SECONDS,
+       device_class=SensorDeviceClass.DURATION,
+       suggested_display_precision=0,
+       suggested_unit_of_measurement=UnitOfTime.DAYS,
+       value_fn=lambda data: data.host.uptime,
+       entity_registry_enabled_default=False,
+   ),
+   AirOSSensorEntityDescription(
+       key="wireless_distance",
+       translation_key="wireless_distance",
+       native_unit_of_measurement=UnitOfLength.METERS,
+       device_class=SensorDeviceClass.DISTANCE,
+       suggested_display_precision=1,
+       suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
+       value_fn=lambda data: data.wireless.distance,
+   ),
+   AirOSSensorEntityDescription(
+       key="wireless_mode",
+       translation_key="wireless_mode",
+       device_class=SensorDeviceClass.ENUM,
+       value_fn=lambda data: data.derived.mode.value,
+       options=WIRELESS_MODE_OPTIONS,
+       entity_registry_enabled_default=False,
+   ),
+   AirOSSensorEntityDescription(
+       key="wireless_role",
+       translation_key="wireless_role",
+       device_class=SensorDeviceClass.ENUM,
+       value_fn=lambda data: data.derived.role.value,
+       options=WIRELESS_ROLE_OPTIONS,
+       entity_registry_enabled_default=False,
+   ),
)


homeassistant/components/airos/strings.json

@@ -26,6 +26,23 @@
    }
  },
  "entity": {
+   "binary_sensor": {
+     "port_forwarding": {
+       "name": "Port forwarding"
+     },
+     "dhcp_client": {
+       "name": "DHCP client"
+     },
+     "dhcp_server": {
+       "name": "DHCP server"
+     },
+     "dhcp6_server": {
+       "name": "DHCPv6 server"
+     },
+     "pppoe": {
+       "name": "PPPoE link"
+     }
+   },
    "sensor": {
      "host_cpuload": {
        "name": "CPU load"
@@ -60,6 +77,26 @@
      },
      "wireless_remote_hostname": {
        "name": "Remote hostname"
+     },
+     "host_uptime": {
+       "name": "Uptime"
+     },
+     "wireless_distance": {
+       "name": "Wireless distance"
+     },
+     "wireless_role": {
+       "name": "Wireless role",
+       "state": {
+         "access_point": "Access point",
+         "station": "Station"
+       }
+     },
+     "wireless_mode": {
+       "name": "Wireless mode",
+       "state": {
+         "point_to_point": "Point-to-point",
+         "point_to_multipoint": "Point-to-multipoint"
+       }
      }
    }
  },

homeassistant/components/airq/__init__.py

@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator

-PLATFORMS: list[Platform] = [Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]

AirQConfigEntry = ConfigEntry[AirQCoordinator]

homeassistant/components/airq/coordinator.py

@@ -75,6 +75,7 @@ class AirQCoordinator(DataUpdateCoordinator):
            return_average=self.return_average,
            clip_negative_values=self.clip_negative,
        )
+       data["brightness"] = await self.airq.get_current_brightness()
        if warming_up_sensors := identify_warming_up_sensors(data):
            _LOGGER.debug(
                "Following sensors are still warming up: %s", warming_up_sensors

homeassistant/components/airq/number.py (new file)
@@ -0,0 +1,85 @@
+"""Definition of air-Q number platform used to control the LED strips."""
+
+from __future__ import annotations
+
+from collections.abc import Awaitable, Callable
+from dataclasses import dataclass
+import logging
+
+from aioairq.core import AirQ
+
+from homeassistant.components.number import NumberEntity, NumberEntityDescription
+from homeassistant.const import PERCENTAGE
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from . import AirQConfigEntry, AirQCoordinator
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True, kw_only=True)
+class AirQBrightnessDescription(NumberEntityDescription):
+    """Describes AirQ number entity responsible for brightness control."""
+
+    value: Callable[[dict], float]
+    set_value: Callable[[AirQ, float], Awaitable[None]]
+
+
+AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
+    key="airq_led_brightness",
+    translation_key="airq_led_brightness",
+    native_min_value=0.0,
+    native_max_value=100.0,
+    native_step=1.0,
+    native_unit_of_measurement=PERCENTAGE,
+    value=lambda data: data["brightness"],
+    set_value=lambda device, value: device.set_current_brightness(value),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AirQConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up number entities: a single entity for the LEDs."""
+
+    coordinator = entry.runtime_data
+    entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
+
+    async_add_entities(entities)
+
+
+class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
+    """Representation of the LEDs from a single AirQ."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: AirQCoordinator,
+        description: AirQBrightnessDescription,
+    ) -> None:
+        """Initialize a single sensor."""
+        super().__init__(coordinator)
+        self.entity_description: AirQBrightnessDescription = description
+
+        self._attr_device_info = coordinator.device_info
+        self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
+
+    @property
+    def native_value(self) -> float:
+        """Return the brightness of the LEDs in %."""
+        return self.entity_description.value(self.coordinator.data)
+
+    async def async_set_native_value(self, value: float) -> None:
+        """Set the brightness of the LEDs to the value in %."""
+        _LOGGER.debug(
+            "Changing LED brightness from %.0f%% to %.0f%%",
+            self.coordinator.data["brightness"],
+            value,
+        )
+        await self.entity_description.set_value(self.coordinator.airq, value)
+        await self.coordinator.async_request_refresh()
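Aside (editorial illustration, not part of the commit): the frozen description above carries both accessors, so the entity never touches the aioairq API directly. A minimal runnable sketch of how the two callables line up; FakeAirQ and the two lambdas are stand-ins mirroring AIRQ_LED_BRIGHTNESS, not real aioairq code.

import asyncio

# Stand-ins mirroring the AIRQ_LED_BRIGHTNESS callables defined above.
value = lambda data: data["brightness"]
set_value = lambda device, v: device.set_current_brightness(v)

class FakeAirQ:
    """Toy stand-in for aioairq.core.AirQ, illustration only."""

    async def set_current_brightness(self, v: float) -> None:
        print(f"LEDs set to {v:.0f}%")

async def demo() -> None:
    data = {"brightness": 70.0}  # shape the coordinator update above produces
    assert value(data) == 70.0           # what native_value returns
    await set_value(FakeAirQ(), 30.0)    # what async_set_native_value awaits

asyncio.run(demo())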
@@ -35,6 +35,11 @@
     }
   },
   "entity": {
+    "number": {
+      "airq_led_brightness": {
+        "name": "LED brightness"
+      }
+    },
     "sensor": {
       "acetaldehyde": {
         "name": "Acetaldehyde"
@@ -1,11 +1,11 @@
 """Alexa Devices integration."""

-from homeassistant.const import Platform
+from homeassistant.const import CONF_COUNTRY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client, config_validation as cv
 from homeassistant.helpers.typing import ConfigType

-from .const import DOMAIN
+from .const import _LOGGER, COUNTRY_DOMAINS, DOMAIN
 from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
 from .services import async_setup_services

@@ -40,6 +40,32 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
     return True


+async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
+    """Migrate old entry."""
+    if entry.version == 1 and entry.minor_version == 0:
+        _LOGGER.debug(
+            "Migrating from version %s.%s", entry.version, entry.minor_version
+        )
+
+        # Convert country to domain
+        country = entry.data[CONF_COUNTRY]
+        domain = COUNTRY_DOMAINS.get(country, country)
+
+        # Save domain and remove country
+        new_data = entry.data.copy()
+        new_data.update({"site": f"https://www.amazon.{domain}"})
+
+        hass.config_entries.async_update_entry(
+            entry, data=new_data, version=1, minor_version=1
+        )
+
+        _LOGGER.info(
+            "Migration to version %s.%s successful", entry.version, entry.minor_version
+        )
+
+    return True
+
+
 async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
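Aside (editorial illustration, not part of the commit): with the COUNTRY_DOMAINS table this commit adds to const.py below, the migration above rewrites a v1.0 entry roughly as follows. The entry values here are made up.

# Hypothetical v1.0 entry data, before migration:
old_data = {"country": "gb", "username": "me@example.com", "password": "secret"}

# async_migrate_entry looks up the Amazon domain for the country
# (COUNTRY_DOMAINS maps "gb" to "co.uk") and stores the full site URL:
new_data = {**old_data, "site": "https://www.amazon.co.uk"}
# The entry is then bumped to version 1.1 via async_update_entry.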
@@ -10,16 +10,14 @@ from aioamazondevices.exceptions import (
     CannotAuthenticate,
     CannotConnect,
     CannotRetrieveData,
-    WrongCountry,
 )
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client
 import homeassistant.helpers.config_validation as cv
-from homeassistant.helpers.selector import CountrySelector

 from .const import CONF_LOGIN_DATA, DOMAIN

@@ -29,6 +27,12 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema(
         vol.Required(CONF_CODE): cv.string,
     }
 )
+STEP_RECONFIGURE = vol.Schema(
+    {
+        vol.Required(CONF_PASSWORD): cv.string,
+        vol.Required(CONF_CODE): cv.string,
+    }
+)


 async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
@@ -37,7 +41,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
     session = aiohttp_client.async_create_clientsession(hass)
     api = AmazonEchoApi(
         session,
-        data[CONF_COUNTRY],
         data[CONF_USERNAME],
         data[CONF_PASSWORD],
     )
@@ -48,6 +51,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
 class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Alexa Devices."""

+    VERSION = 1
+    MINOR_VERSION = 1
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -58,12 +64,10 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 data = await validate_input(self.hass, user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except CannotAuthenticate:
+            except (CannotAuthenticate, TypeError):
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
-            except WrongCountry:
-                errors["base"] = "wrong_country"
             else:
                 await self.async_set_unique_id(data["customer_info"]["user_id"])
                 self._abort_if_unique_id_configured()
@@ -78,9 +82,6 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
             data_schema=vol.Schema(
                 {
-                    vol.Required(
-                        CONF_COUNTRY, default=self.hass.config.country
-                    ): CountrySelector(),
                     vol.Required(CONF_USERNAME): cv.string,
                     vol.Required(CONF_PASSWORD): cv.string,
                     vol.Required(CONF_CODE): cv.string,
@@ -109,7 +110,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 await validate_input(self.hass, {**reauth_entry.data, **user_input})
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except CannotAuthenticate:
+            except (CannotAuthenticate, TypeError):
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -129,3 +130,47 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
             data_schema=STEP_REAUTH_DATA_SCHEMA,
             errors=errors,
         )
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration of the device."""
+        reconfigure_entry = self._get_reconfigure_entry()
+        if not user_input:
+            return self.async_show_form(
+                step_id="reconfigure",
+                data_schema=STEP_RECONFIGURE,
+            )
+
+        updated_password = user_input[CONF_PASSWORD]
+
+        self._async_abort_entries_match(
+            {CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME]}
+        )
+
+        errors: dict[str, str] = {}
+
+        try:
+            data = await validate_input(
+                self.hass, {**reconfigure_entry.data, **user_input}
+            )
+        except CannotConnect:
+            errors["base"] = "cannot_connect"
+        except CannotAuthenticate:
+            errors["base"] = "invalid_auth"
+        except CannotRetrieveData:
+            errors["base"] = "cannot_retrieve_data"
+        else:
+            return self.async_update_reload_and_abort(
+                reconfigure_entry,
+                data_updates={
+                    CONF_PASSWORD: updated_password,
+                    CONF_LOGIN_DATA: data,
+                },
+            )
+
+        return self.async_show_form(
+            step_id="reconfigure",
+            data_schema=STEP_RECONFIGURE,
+            errors=errors,
+        )
@@ -6,3 +6,22 @@ _LOGGER = logging.getLogger(__package__)

 DOMAIN = "alexa_devices"
 CONF_LOGIN_DATA = "login_data"
+
+DEFAULT_DOMAIN = "com"
+COUNTRY_DOMAINS = {
+    "ar": DEFAULT_DOMAIN,
+    "at": DEFAULT_DOMAIN,
+    "au": "com.au",
+    "be": "com.be",
+    "br": DEFAULT_DOMAIN,
+    "gb": "co.uk",
+    "il": DEFAULT_DOMAIN,
+    "jp": "co.jp",
+    "mx": "com.mx",
+    "no": DEFAULT_DOMAIN,
+    "nz": "com.au",
+    "pl": DEFAULT_DOMAIN,
+    "tr": "com.tr",
+    "us": DEFAULT_DOMAIN,
+    "za": "co.za",
+}
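A quick illustration (editorial, not from the diff) of the lookup semantics the migration above relies on: countries absent from the table fall back to their own code as the domain, which is exactly what `.get(country, country)` produces.

COUNTRY_DOMAINS = {"us": "com", "gb": "co.uk"}  # excerpt of the table above

assert COUNTRY_DOMAINS.get("gb", "gb") == "co.uk"  # mapped: amazon.co.uk
assert COUNTRY_DOMAINS.get("it", "it") == "it"     # unmapped: amazon.it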
@@ -11,7 +11,7 @@ from aioamazondevices.exceptions import (
 from aiohttp import ClientSession

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -44,7 +44,6 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
         )
         self.api = AmazonEchoApi(
             session,
-            entry.data[CONF_COUNTRY],
             entry.data[CONF_USERNAME],
             entry.data[CONF_PASSWORD],
             entry.data[CONF_LOGIN_DATA],
@@ -67,7 +66,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="cannot_retrieve_data_with_error",
                 translation_placeholders={"error": repr(err)},
             ) from err
-        except CannotAuthenticate as err:
+        except (CannotAuthenticate, TypeError) as err:
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "silver",
-  "requirements": ["aioamazondevices==4.0.0"]
+  "requirements": ["aioamazondevices==5.0.0"]
 }
@@ -60,7 +60,7 @@ rules:
   entity-translations: done
   exception-translations: done
   icon-translations: done
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues:
     status: exempt
     comment: no known use cases for repair issues or flows, yet
@@ -1,7 +1,6 @@
 {
   "common": {
     "data_code": "One-time password (OTP code)",
-    "data_description_country": "The country where your Amazon account is registered.",
     "data_description_username": "The email address of your Amazon account.",
     "data_description_password": "The password of your Amazon account.",
     "data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported.",
@@ -12,13 +11,11 @@
     "step": {
       "user": {
         "data": {
-          "country": "[%key:common::config_flow::data::country%]",
           "username": "[%key:common::config_flow::data::username%]",
           "password": "[%key:common::config_flow::data::password%]",
           "code": "[%key:component::alexa_devices::common::data_code%]"
         },
         "data_description": {
-          "country": "[%key:component::alexa_devices::common::data_description_country%]",
           "username": "[%key:component::alexa_devices::common::data_description_username%]",
           "password": "[%key:component::alexa_devices::common::data_description_password%]",
           "code": "[%key:component::alexa_devices::common::data_description_code%]"
@@ -33,6 +30,16 @@
           "password": "[%key:component::alexa_devices::common::data_description_password%]",
           "code": "[%key:component::alexa_devices::common::data_description_code%]"
         }
+      },
+      "reconfigure": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]",
+          "code": "[%key:component::alexa_devices::common::data_code%]"
+        },
+        "data_description": {
+          "password": "[%key:component::alexa_devices::common::data_description_password%]",
+          "code": "[%key:component::alexa_devices::common::data_description_code%]"
+        }
       }
     },
     "abort": {
@@ -40,13 +47,13 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
-      "wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     }
   },
@@ -16,7 +16,7 @@ from homeassistant.helpers.selector import (
     SelectSelectorMode,
 )

-from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN
+from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN, REQUEST_TIMEOUT

 API_URL = "https://app.amber.com.au/developers"

@@ -64,7 +64,9 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
         api = amberelectric.AmberApi(api_client)

         try:
-            sites: list[Site] = filter_sites(api.get_sites())
+            sites: list[Site] = filter_sites(
+                api.get_sites(_request_timeout=REQUEST_TIMEOUT)
+            )
         except amberelectric.ApiException as api_exception:
             if api_exception.status == 403:
                 self._errors[CONF_API_TOKEN] = "invalid_api_token"
@@ -21,3 +21,5 @@ SERVICE_GET_FORECASTS = "get_forecasts"
 GENERAL_CHANNEL = "general"
 CONTROLLED_LOAD_CHANNEL = "controlled_load"
 FEED_IN_CHANNEL = "feed_in"
+
+REQUEST_TIMEOUT = 15
@@ -16,7 +16,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import LOGGER
+from .const import LOGGER, REQUEST_TIMEOUT
 from .helpers import normalize_descriptor

 type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
@@ -82,7 +82,11 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
             "grid": {},
         }
         try:
-            data = self._api.get_current_prices(self.site_id, next=288)
+            data = self._api.get_current_prices(
+                self.site_id,
+                next=288,
+                _request_timeout=REQUEST_TIMEOUT,
+            )
             intervals = [interval.actual_instance for interval in data]
         except ApiException as api_exception:
             raise UpdateFailed("Missing price data, skipping update") from api_exception
@@ -5,7 +5,7 @@ from __future__ import annotations
 from aioambient.util import get_public_device_id

 from homeassistant.core import callback
-from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity import Entity, EntityDescription

@@ -37,6 +37,7 @@ class AmbientWeatherEntity(Entity):
             identifiers={(DOMAIN, mac_address)},
             manufacturer="Ambient Weather",
             name=station_name.capitalize(),
+            connections={(CONNECTION_NETWORK_MAC, mac_address)},
         )

         self._attr_unique_id = f"{mac_address}_{description.key}"
@@ -390,7 +390,6 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:

 async def async_devices_payload(hass: HomeAssistant) -> dict:
     """Return the devices payload."""
-    integrations_without_model_id: set[str] = set()
     devices: list[dict[str, Any]] = []
     dev_reg = dr.async_get(hass)
     # Devices that need via device info set
@@ -400,10 +399,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
     seen_integrations = set()

     for device in dev_reg.devices.values():
-        # Ignore services
-        if device.entry_type:
-            continue
-
         if not device.primary_config_entry:
             continue

@@ -414,13 +409,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:

         seen_integrations.add(config_entry.domain)

-        if not device.model_id:
-            integrations_without_model_id.add(config_entry.domain)
-            continue
-
-        if not device.manufacturer:
-            continue
-
         new_indexes[device.id] = len(devices)
         devices.append(
             {
@@ -432,8 +420,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
                 "hw_version": device.hw_version,
                 "has_configuration_url": device.configuration_url is not None,
                 "via_device": None,
+                "entry_type": device.entry_type.value if device.entry_type else None,
             }
         )

         if device.via_device_id:
             via_devices[device.id] = device.via_device_id

@@ -453,15 +443,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
     for device_info in devices:
         if integration := integrations.get(device_info["integration"]):
             device_info["is_custom_integration"] = not integration.is_built_in
+            # Include version for custom integrations
+            if not integration.is_built_in and integration.version:
+                device_info["custom_integration_version"] = str(integration.version)

     return {
         "version": "home-assistant:1",
-        "no_model_id": sorted(
-            [
-                domain
-                for domain in integrations_without_model_id
-                if domain in integrations and integrations[domain].is_built_in
-            ]
-        ),
+        "home_assistant": HA_VERSION,
         "devices": devices,
     }
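Summary sketch (editorial; key values are made up, only keys visible in the diff are shown): after this change, service devices are reported with their entry_type instead of being skipped, the per-integration no_model_id list is gone, and the payload gains the installation's Home Assistant version.

payload = {
    "version": "home-assistant:1",
    "home_assistant": "2025.8.0",  # HA_VERSION at payload time
    "devices": [
        {
            "hw_version": None,
            "has_configuration_url": False,
            "via_device": None,
            "entry_type": "service",  # None for regular devices
            # ... other device fields elided ...
        }
    ],
}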
@@ -30,10 +30,9 @@ class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
         cam: PyDroidIPCam,
     ) -> None:
         """Initialize the Android IP Webcam."""
-        self.hass = hass
         self.cam = cam
         super().__init__(
-            self.hass,
+            hass,
             _LOGGER,
             config_entry=config_entry,
             name=f"{DOMAIN} {config_entry.data[CONF_HOST]}",
@@ -20,10 +20,8 @@ RECOMMENDED_THINKING_BUDGET = 0
 MIN_THINKING_BUDGET = 1024

 THINKING_MODELS = [
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-    "claude-opus-4-20250514",
-    "claude-opus-4-0",
-    "claude-sonnet-4-20250514",
+    "claude-3-7-sonnet",
     "claude-sonnet-4-0",
+    "claude-opus-4-0",
+    "claude-opus-4-1",
 ]
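Context for the shortened list (editorial note, not from the diff): later in this commit the entity code switches from `model in THINKING_MODELS` to `model.startswith(tuple(THINKING_MODELS))`, so these entries now act as prefixes and dated releases such as claude-3-7-sonnet-20250219 no longer need their own rows. A quick check:

THINKING_MODELS = ["claude-3-7-sonnet", "claude-sonnet-4-0", "claude-opus-4-0", "claude-opus-4-1"]

assert "claude-3-7-sonnet-20250219".startswith(tuple(THINKING_MODELS))
assert not "claude-3-5-haiku-latest".startswith(tuple(THINKING_MODELS))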
@@ -2,11 +2,10 @@

 from collections.abc import AsyncGenerator, Callable, Iterable
 import json
-from typing import Any, cast
+from typing import Any

 import anthropic
 from anthropic import AsyncStream
-from anthropic._types import NOT_GIVEN
 from anthropic.types import (
     InputJSONDelta,
     MessageDeltaUsage,
@@ -17,7 +16,6 @@ from anthropic.types import (
     RawContentBlockStopEvent,
     RawMessageDeltaEvent,
     RawMessageStartEvent,
-    RawMessageStopEvent,
     RedactedThinkingBlock,
     RedactedThinkingBlockParam,
     SignatureDelta,
@@ -35,6 +33,7 @@ from anthropic.types import (
     ToolUseBlockParam,
     Usage,
 )
+from anthropic.types.message_create_params import MessageCreateParamsStreaming
 from voluptuous_openapi import convert

 from homeassistant.components import conversation
@@ -129,6 +128,28 @@ def _convert_content(
                 )
             )

+        if isinstance(content.native, ThinkingBlock):
+            messages[-1]["content"].append(  # type: ignore[union-attr]
+                ThinkingBlockParam(
+                    type="thinking",
+                    thinking=content.thinking_content or "",
+                    signature=content.native.signature,
+                )
+            )
+        elif isinstance(content.native, RedactedThinkingBlock):
+            redacted_thinking_block = RedactedThinkingBlockParam(
+                type="redacted_thinking",
+                data=content.native.data,
+            )
+            if isinstance(messages[-1]["content"], str):
+                messages[-1]["content"] = [
+                    TextBlockParam(type="text", text=messages[-1]["content"]),
+                    redacted_thinking_block,
+                ]
+            else:
+                messages[-1]["content"].append(  # type: ignore[attr-defined]
+                    redacted_thinking_block
+                )
         if content.content:
             messages[-1]["content"].append(  # type: ignore[union-attr]
                 TextBlockParam(type="text", text=content.content)
@@ -152,10 +173,9 @@ def _convert_content(
     return messages


-async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
+async def _transform_stream(
     chat_log: conversation.ChatLog,
-    result: AsyncStream[MessageStreamEvent],
-    messages: list[MessageParam],
+    stream: AsyncStream[MessageStreamEvent],
 ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
     """Transform the response stream into HA format.

@@ -186,31 +206,25 @@ async def _transform_stream(

     Each message could contain multiple blocks of the same type.
     """
-    if result is None:
+    if stream is None:
         raise TypeError("Expected a stream of messages")

-    current_message: MessageParam | None = None
-    current_block: (
-        TextBlockParam
-        | ToolUseBlockParam
-        | ThinkingBlockParam
-        | RedactedThinkingBlockParam
-        | None
-    ) = None
+    current_tool_block: ToolUseBlockParam | None = None
     current_tool_args: str
     input_usage: Usage | None = None
+    has_content = False
+    has_native = False

-    async for response in result:
+    async for response in stream:
         LOGGER.debug("Received response: %s", response)

         if isinstance(response, RawMessageStartEvent):
             if response.message.role != "assistant":
                 raise ValueError("Unexpected message role")
-            current_message = MessageParam(role=response.message.role, content=[])
             input_usage = response.message.usage
         elif isinstance(response, RawContentBlockStartEvent):
             if isinstance(response.content_block, ToolUseBlock):
-                current_block = ToolUseBlockParam(
+                current_tool_block = ToolUseBlockParam(
                     type="tool_use",
                     id=response.content_block.id,
                     name=response.content_block.name,
@@ -218,75 +232,64 @@ async def _transform_stream(
                 )
                 current_tool_args = ""
             elif isinstance(response.content_block, TextBlock):
-                current_block = TextBlockParam(
-                    type="text", text=response.content_block.text
-                )
-                yield {"role": "assistant"}
+                if has_content:
+                    yield {"role": "assistant"}
+                    has_native = False
+                has_content = True
                 if response.content_block.text:
                     yield {"content": response.content_block.text}
             elif isinstance(response.content_block, ThinkingBlock):
-                current_block = ThinkingBlockParam(
-                    type="thinking",
-                    thinking=response.content_block.thinking,
-                    signature=response.content_block.signature,
-                )
+                if has_native:
+                    yield {"role": "assistant"}
+                    has_native = False
+                    has_content = False
             elif isinstance(response.content_block, RedactedThinkingBlock):
-                current_block = RedactedThinkingBlockParam(
-                    type="redacted_thinking", data=response.content_block.data
-                )
                 LOGGER.debug(
                     "Some of Claude’s internal reasoning has been automatically "
                     "encrypted for safety reasons. This doesn’t affect the quality of "
                     "responses"
                 )
+                if has_native:
+                    yield {"role": "assistant"}
+                    has_native = False
+                    has_content = False
+                yield {"native": response.content_block}
+                has_native = True
         elif isinstance(response, RawContentBlockDeltaEvent):
-            if current_block is None:
-                raise ValueError("Unexpected delta without a block")
             if isinstance(response.delta, InputJSONDelta):
                 current_tool_args += response.delta.partial_json
             elif isinstance(response.delta, TextDelta):
-                text_block = cast(TextBlockParam, current_block)
-                text_block["text"] += response.delta.text
                 yield {"content": response.delta.text}
             elif isinstance(response.delta, ThinkingDelta):
-                thinking_block = cast(ThinkingBlockParam, current_block)
-                thinking_block["thinking"] += response.delta.thinking
+                yield {"thinking_content": response.delta.thinking}
             elif isinstance(response.delta, SignatureDelta):
-                thinking_block = cast(ThinkingBlockParam, current_block)
-                thinking_block["signature"] += response.delta.signature
+                yield {
+                    "native": ThinkingBlock(
+                        type="thinking",
+                        thinking="",
+                        signature=response.delta.signature,
+                    )
+                }
+                has_native = True
         elif isinstance(response, RawContentBlockStopEvent):
-            if current_block is None:
-                raise ValueError("Unexpected stop event without a current block")
-            if current_block["type"] == "tool_use":
-                # tool block
+            if current_tool_block is not None:
                 tool_args = json.loads(current_tool_args) if current_tool_args else {}
-                current_block["input"] = tool_args
+                current_tool_block["input"] = tool_args
                 yield {
                     "tool_calls": [
                         llm.ToolInput(
-                            id=current_block["id"],
-                            tool_name=current_block["name"],
+                            id=current_tool_block["id"],
+                            tool_name=current_tool_block["name"],
                             tool_args=tool_args,
                         )
                     ]
                 }
-            elif current_block["type"] == "thinking":
-                # thinking block
-                LOGGER.debug("Thinking: %s", current_block["thinking"])
-
-            if current_message is None:
-                raise ValueError("Unexpected stop event without a current message")
-            current_message["content"].append(current_block)  # type: ignore[union-attr]
-            current_block = None
+                current_tool_block = None
         elif isinstance(response, RawMessageDeltaEvent):
             if (usage := response.usage) is not None:
                 chat_log.async_trace(_create_token_stats(input_usage, usage))
             if response.delta.stop_reason == "refusal":
                 raise HomeAssistantError("Potential policy violation detected")
-        elif isinstance(response, RawMessageStopEvent):
-            if current_message is not None:
-                messages.append(current_message)
-                current_message = None


 def _create_token_stats(
@@ -351,45 +354,48 @@ class AnthropicBaseLLMEntity(Entity):
         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
         model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)

+        model_args = MessageCreateParamsStreaming(
+            model=model,
+            messages=messages,
+            max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
+            system=system.content,
+            stream=True,
+        )
+        if tools:
+            model_args["tools"] = tools
+        if (
+            model.startswith(tuple(THINKING_MODELS))
+            and thinking_budget >= MIN_THINKING_BUDGET
+        ):
+            model_args["thinking"] = ThinkingConfigEnabledParam(
+                type="enabled", budget_tokens=thinking_budget
+            )
+        else:
+            model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
+            model_args["temperature"] = options.get(
+                CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
+            )
+
         # To prevent infinite loops, we limit the number of iterations
         for _iteration in range(MAX_TOOL_ITERATIONS):
-            model_args = {
-                "model": model,
-                "messages": messages,
-                "tools": tools or NOT_GIVEN,
-                "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
-                "system": system.content,
-                "stream": True,
-            }
-            if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
-                model_args["thinking"] = ThinkingConfigEnabledParam(
-                    type="enabled", budget_tokens=thinking_budget
-                )
-            else:
-                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
-                model_args["temperature"] = options.get(
-                    CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
-                )
-
             try:
                 stream = await client.messages.create(**model_args)
+
+                messages.extend(
+                    _convert_content(
+                        [
+                            content
+                            async for content in chat_log.async_add_delta_content_stream(
+                                self.entity_id,
+                                _transform_stream(chat_log, stream),
+                            )
+                        ]
+                    )
+                )
             except anthropic.AnthropicError as err:
                 raise HomeAssistantError(
                     f"Sorry, I had a problem talking to Anthropic: {err}"
                 ) from err
-
-            messages.extend(
-                _convert_content(
-                    [
-                        content
-                        async for content in chat_log.async_add_delta_content_stream(
-                            self.entity_id,
-                            _transform_stream(chat_log, stream, messages),
-                        )
-                        if not isinstance(content, conversation.AssistantContent)
-                    ]
-                )
-            )

             if not chat_log.unresponded_tool_results:
                 break
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.52.0"]
+  "requirements": ["anthropic==0.62.0"]
 }
@@ -10,9 +10,9 @@ from homeassistant.components.binary_sensor import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
+from .entity import APCUPSdEntity

 PARALLEL_UPDATES = 0

@@ -40,22 +40,16 @@ async def async_setup_entry(
     async_add_entities([OnlineStatus(coordinator, _DESCRIPTION)])


-class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
+class OnlineStatus(APCUPSdEntity, BinarySensorEntity):
     """Representation of a UPS online status."""

-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: APCUPSdCoordinator,
         description: BinarySensorEntityDescription,
     ) -> None:
         """Initialize the APCUPSd binary device."""
-        super().__init__(coordinator, context=description.key.upper())
-
-        self.entity_description = description
-        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
-        self._attr_device_info = coordinator.device_info
+        super().__init__(coordinator, description)

     @property
     def is_on(self) -> bool | None:
homeassistant/components/apcupsd/entity.py (new file)
@@ -0,0 +1,26 @@
+"""Base entity for APCUPSd integration."""
+
+from __future__ import annotations
+
+from homeassistant.helpers.entity import EntityDescription
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .coordinator import APCUPSdCoordinator
+
+
+class APCUPSdEntity(CoordinatorEntity[APCUPSdCoordinator]):
+    """Base entity for APCUPSd integration."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: APCUPSdCoordinator,
+        description: EntityDescription,
+    ) -> None:
+        """Initialize the APCUPSd entity."""
+        super().__init__(coordinator, context=description.key.upper())
+
+        self.entity_description = description
+        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
+        self._attr_device_info = coordinator.device_info
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/apcupsd",
   "iot_class": "local_polling",
   "loggers": ["apcaccess"],
+  "quality_scale": "bronze",
   "requirements": ["aioapcaccess==0.4.2"]
 }
homeassistant/components/apcupsd/quality_scale.yaml (new file)
@@ -0,0 +1,90 @@
+rules:
+  # Bronze
+  action-setup: done
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      The integration does not provide any actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities of this integration do not explicitly subscribe to events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+  # Silver
+  action-exceptions:
+    status: exempt
+    comment: |
+      The integration does not provide any actions.
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      The integration does not provide any additional options.
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: done
+  reauthentication-flow:
+    status: exempt
+    comment: |
+      The integration does not require authentication.
+  test-coverage:
+    status: todo
+    comment: |
+      Patch `aioapcaccess.request_status` where we use it.
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration cannot be discovered.
+  discovery:
+    status: exempt
+    comment: |
+      This integration cannot be discovered.
+  docs-data-update: done
+  docs-examples: done
+  docs-known-limitations: done
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: done
+  dynamic-devices:
+    status: exempt
+    comment: |
+      The integration connects to a single service per configuration entry.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: done
+  icon-translations: done
+  reconfiguration-flow: done
+  repair-issues: done
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration connects to a single service per configuration entry.
+  # Platinum
+  async-dependency: done
+  inject-websession:
+    status: exempt
+    comment: |
+      The integration does not connect via HTTP.
+  strict-typing: done
@@ -23,10 +23,10 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import LAST_S_TEST
 from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
+from .entity import APCUPSdEntity

 PARALLEL_UPDATES = 0

@@ -490,22 +490,16 @@ def infer_unit(value: str) -> tuple[str, str | None]:
     return value, None


-class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
+class APCUPSdSensor(APCUPSdEntity, SensorEntity):
     """Representation of a sensor entity for APCUPSd status values."""

-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: APCUPSdCoordinator,
         description: SensorEntityDescription,
     ) -> None:
         """Initialize the sensor."""
-        super().__init__(coordinator=coordinator, context=description.key.upper())
-
-        self.entity_description = description
-        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
-        self._attr_device_info = coordinator.device_info
+        super().__init__(coordinator, description)

         # Initial update of attributes.
         self._update_attrs()
@@ -14,7 +14,22 @@
         "host": "[%key:common::config_flow::data::host%]",
         "port": "[%key:common::config_flow::data::port%]"
       },
+      "data_description": {
+        "host": "The hostname or IP address of the APC UPS Daemon",
+        "port": "The port the APC UPS Daemon is listening on"
+      },
       "description": "Enter the host and port on which the apcupsd NIS is being served."
+    },
+    "reconfigure": {
+      "data": {
+        "host": "[%key:common::config_flow::data::host%]",
+        "port": "[%key:common::config_flow::data::port%]"
+      },
+      "data_description": {
+        "host": "[%key:component::apcupsd::config::step::user::data_description::host%]",
+        "port": "[%key:component::apcupsd::config::step::user::data_description::port%]"
+      },
+      "description": "[%key:component::apcupsd::config::step::user::description%]"
     }
   }
 },
@@ -11,7 +11,7 @@ import time
 from typing import Any, Literal, final

 from hassil import Intents, recognize
-from hassil.expression import Expression, ListReference, Sequence
+from hassil.expression import Expression, Group, ListReference
 from hassil.intents import WildcardSlotList

 from homeassistant.components import conversation, media_source, stt, tts
@@ -413,7 +413,7 @@ class AssistSatelliteEntity(entity.Entity):
         for intent in intents.intents.values():
             for intent_data in intent.data:
                 for sentence in intent_data.sentences:
-                    _collect_list_references(sentence, wildcard_names)
+                    _collect_list_references(sentence.expression, wildcard_names)

         for wildcard_name in wildcard_names:
             intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -727,9 +727,9 @@ class AssistSatelliteEntity(entity.Entity):

 def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     """Collect list reference names recursively."""
-    if isinstance(expression, Sequence):
-        seq: Sequence = expression
-        for item in seq.items:
+    if isinstance(expression, Group):
+        grp: Group = expression
+        for item in grp.items:
             _collect_list_references(item, list_names)
     elif isinstance(expression, ListReference):
         # {list}
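Background sketch (editorial; the toy classes below only mimic the Group/ListReference shape the code above relies on and are not the hassil API): the recursion walks grouped expression items and collects {list} names from ListReference leaves.

class Expr: ...

class Ref(Expr):
    def __init__(self, name: str) -> None:
        self.list_name = name

class Grp(Expr):
    def __init__(self, items: list[Expr]) -> None:
        self.items = items

def collect(expr: Expr, names: set[str]) -> None:
    if isinstance(expr, Grp):
        for item in expr.items:
            collect(item, names)
    elif isinstance(expr, Ref):
        names.add(expr.list_name)

names: set[str] = set()
collect(Grp([Ref("area"), Grp([Ref("name")])]), names)
assert names == {"area", "name"}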
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/assist_satellite",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3"]
+  "requirements": ["hassil==3.1.0"]
 }
@@ -5,15 +5,17 @@ from __future__ import annotations
 from abc import ABC, abstractmethod
 from collections import namedtuple
 from collections.abc import Awaitable, Callable, Coroutine
-from datetime import datetime
 import functools
 import logging
 from typing import Any, cast

 from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
 from aiohttp import ClientSession
-from pyasuswrt import AsusWrtError, AsusWrtHttp
-from pyasuswrt.exceptions import AsusWrtNotAvailableInfoError
+from asusrouter import AsusRouter, AsusRouterError
+from asusrouter.modules.client import AsusClient
+from asusrouter.modules.data import AsusData
+from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
+from asusrouter.tools.connection import get_cookie_jar

 from homeassistant.const import (
     CONF_HOST,
@@ -24,7 +26,7 @@ from homeassistant.const import (
     CONF_USERNAME,
 )
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.aiohttp_client import async_create_clientsession
 from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.update_coordinator import UpdateFailed

@@ -41,14 +43,13 @@ from .const import (
     PROTOCOL_HTTPS,
     PROTOCOL_TELNET,
     SENSORS_BYTES,
-    SENSORS_CPU,
     SENSORS_LOAD_AVG,
     SENSORS_MEMORY,
     SENSORS_RATES,
-    SENSORS_TEMPERATURES,
     SENSORS_TEMPERATURES_LEGACY,
     SENSORS_UPTIME,
 )
+from .helpers import clean_dict, translate_to_legacy

 SENSORS_TYPE_BYTES = "sensors_bytes"
 SENSORS_TYPE_COUNT = "sensors_count"
@@ -109,7 +110,10 @@ class AsusWrtBridge(ABC):
     ) -> AsusWrtBridge:
         """Get Bridge instance."""
         if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP):
-            session = async_get_clientsession(hass)
+            session = async_create_clientsession(
+                hass,
+                cookie_jar=get_cookie_jar(),
+            )
             return AsusWrtHttpBridge(conf, session)
         return AsusWrtLegacyBridge(conf, options)

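The dedicated session matters because routers are addressed by bare IP, and aiohttp's default CookieJar discards cookies set by IP-address hosts. A sketch of that underlying aiohttp behavior; that asusrouter's get_cookie_jar() reduces to an unsafe jar is an assumption for illustration:

import aiohttp


def make_router_session() -> aiohttp.ClientSession:
    # unsafe=True accepts cookies from hosts like 192.168.1.1; the default
    # jar would silently drop the router's authentication cookie
    jar = aiohttp.CookieJar(unsafe=True)  # hypothetical stand-in for get_cookie_jar()
    return aiohttp.ClientSession(cookie_jar=jar)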
@@ -310,16 +314,16 @@ class AsusWrtHttpBridge(AsusWrtBridge):
     def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
         """Initialize Bridge that use HTTP library."""
         super().__init__(conf[CONF_HOST])
-        self._api: AsusWrtHttp = self._get_api(conf, session)
+        self._api = self._get_api(conf, session)

     @staticmethod
-    def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
-        """Get the AsusWrtHttp API."""
-        return AsusWrtHttp(
-            conf[CONF_HOST],
-            conf[CONF_USERNAME],
-            conf.get(CONF_PASSWORD, ""),
-            use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
+    def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
+        """Get the AsusRouter API."""
+        return AsusRouter(
+            hostname=conf[CONF_HOST],
+            username=conf[CONF_USERNAME],
+            password=conf.get(CONF_PASSWORD, ""),
+            use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
             port=conf.get(CONF_PORT),
             session=session,
         )
@@ -327,46 +331,90 @@ class AsusWrtHttpBridge(AsusWrtBridge):
     @property
     def is_connected(self) -> bool:
         """Get connected status."""
-        return cast(bool, self._api.is_connected)
+        return self._api.connected

     async def async_connect(self) -> None:
         """Connect to the device."""
         await self._api.async_connect()

+        # Collect the identity
+        _identity = await self._api.async_get_identity()
+
         # get main router properties
-        if mac := self._api.mac:
+        if mac := _identity.mac:
             self._label_mac = format_mac(mac)
-        self._firmware = self._api.firmware
-        self._model = self._api.model
+        self._firmware = str(_identity.firmware)
+        self._model = _identity.model

     async def async_disconnect(self) -> None:
         """Disconnect to the device."""
         await self._api.async_disconnect()

+    async def _get_data(
+        self,
+        datatype: AsusData,
+        force: bool = False,
+    ) -> dict[str, Any]:
+        """Get data from the device.
+
+        This is a generic method which automatically converts to
+        the Home Assistant-compatible format.
+        """
+        try:
+            raw = await self._api.async_get_data(datatype, force=force)
+            return translate_to_legacy(clean_dict(convert_to_ha_data(raw)))
+        except AsusRouterError as ex:
+            raise UpdateFailed(ex) from ex
+
+    async def _get_sensors(self, datatype: AsusData) -> list[str]:
+        """Get the available sensors.
+
+        This is a generic method which automatically converts to
+        the Home Assistant-compatible format.
+        """
+        sensors = []
+        try:
+            data = await self._api.async_get_data(datatype)
+            # Get the list of sensors from the raw data
+            # and translate in to the legacy format
+            sensors = translate_to_legacy(convert_to_ha_sensors(data, datatype))
+            _LOGGER.debug("Available `%s` sensors: %s", datatype.value, sensors)
+        except AsusRouterError as ex:
+            _LOGGER.warning(
+                "Cannot get available `%s` sensors with exception: %s",
+                datatype.value,
+                ex,
+            )
+        return sensors
+
     async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
         """Get list of connected devices."""
-        api_devices = await self._api.async_get_connected_devices()
+        api_devices: dict[str, AsusClient] = await self._api.async_get_data(
+            AsusData.CLIENTS, force=True
+        )
         return {
-            format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
+            format_mac(mac): WrtDevice(
+                dev.connection.ip_address, dev.description.name, dev.connection.node
+            )
             for mac, dev in api_devices.items()
+            if dev.connection is not None
+            and dev.description is not None
+            and dev.connection.ip_address is not None
         }

     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
         """Return a dictionary of available sensors for this bridge."""
-        sensors_cpu = await self._get_available_cpu_sensors()
-        sensors_temperatures = await self._get_available_temperature_sensors()
-        sensors_loadavg = await self._get_loadavg_sensors_availability()
         return {
             SENSORS_TYPE_BYTES: {
                 KEY_SENSORS: SENSORS_BYTES,
                 KEY_METHOD: self._get_bytes,
             },
             SENSORS_TYPE_CPU: {
-                KEY_SENSORS: sensors_cpu,
+                KEY_SENSORS: await self._get_sensors(AsusData.CPU),
                 KEY_METHOD: self._get_cpu_usage,
             },
             SENSORS_TYPE_LOAD_AVG: {
-                KEY_SENSORS: sensors_loadavg,
+                KEY_SENSORS: await self._get_sensors(AsusData.SYSINFO),
                 KEY_METHOD: self._get_load_avg,
             },
             SENSORS_TYPE_MEMORY: {
@@ -382,95 +430,44 @@ class AsusWrtHttpBridge(AsusWrtBridge):
                 KEY_METHOD: self._get_uptime,
             },
             SENSORS_TYPE_TEMPERATURES: {
-                KEY_SENSORS: sensors_temperatures,
+                KEY_SENSORS: await self._get_sensors(AsusData.TEMPERATURE),
                 KEY_METHOD: self._get_temperatures,
             },
         }

-    async def _get_available_cpu_sensors(self) -> list[str]:
-        """Check which cpu information is available on the router."""
-        try:
-            available_cpu = await self._api.async_get_cpu_usage()
-            available_sensors = [t for t in SENSORS_CPU if t in available_cpu]
-        except AsusWrtError as exc:
-            _LOGGER.warning(
-                (
-                    "Failed checking cpu sensor availability for ASUS router"
-                    " %s. Exception: %s"
-                ),
-                self.host,
-                exc,
-            )
-            return []
-        return available_sensors
-
-    async def _get_available_temperature_sensors(self) -> list[str]:
-        """Check which temperature information is available on the router."""
-        try:
-            available_temps = await self._api.async_get_temperatures()
-            available_sensors = [
-                t for t in SENSORS_TEMPERATURES if t in available_temps
-            ]
-        except AsusWrtError as exc:
-            _LOGGER.warning(
-                (
-                    "Failed checking temperature sensor availability for ASUS router"
-                    " %s. Exception: %s"
-                ),
-                self.host,
-                exc,
-            )
-            return []
-        return available_sensors
-
-    async def _get_loadavg_sensors_availability(self) -> list[str]:
-        """Check if load avg is available on the router."""
-        try:
-            await self._api.async_get_loadavg()
-        except AsusWrtNotAvailableInfoError:
-            return []
-        except AsusWrtError:
-            pass
-        return SENSORS_LOAD_AVG
-
-    @handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
     async def _get_bytes(self) -> Any:
         """Fetch byte information from the router."""
-        return await self._api.async_get_traffic_bytes()
+        return await self._get_data(AsusData.NETWORK)

-    @handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
     async def _get_rates(self) -> Any:
         """Fetch rates information from the router."""
-        return await self._api.async_get_traffic_rates()
+        data = await self._get_data(AsusData.NETWORK)
+        # Convert from bits/s to Bytes/s for compatibility with legacy sensors
+        return {
+            key: (
+                value / 8
+                if key in SENSORS_RATES and isinstance(value, (int, float))
+                else value
+            )
+            for key, value in data.items()
+        }

-    @handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
     async def _get_load_avg(self) -> Any:
         """Fetch cpu load avg information from the router."""
-        return await self._api.async_get_loadavg()
+        return await self._get_data(AsusData.SYSINFO)

-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_temperatures(self) -> Any:
         """Fetch temperatures information from the router."""
-        return await self._api.async_get_temperatures()
+        return await self._get_data(AsusData.TEMPERATURE)

-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_cpu_usage(self) -> Any:
         """Fetch cpu information from the router."""
-        return await self._api.async_get_cpu_usage()
+        return await self._get_data(AsusData.CPU)

-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_memory_usage(self) -> Any:
         """Fetch memory information from the router."""
-        return await self._api.async_get_memory_usage()
+        return await self._get_data(AsusData.RAM)

     async def _get_uptime(self) -> dict[str, Any]:
         """Fetch uptime from the router."""
-        try:
-            uptimes = await self._api.async_get_uptime()
-        except AsusWrtError as exc:
-            raise UpdateFailed(exc) from exc
-
-        last_boot = datetime.fromisoformat(uptimes["last_boot"])
-        uptime = uptimes["uptime"]
-
-        return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False))
+        return await self._get_data(AsusData.BOOTTIME)
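A quick worked example of the bits-to-Bytes comprehension in _get_rates, with a stand-in SENSORS_RATES list (the real constant lives in the integration's const module):

SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"]

data = {"sensor_rx_rates": 8_000_000, "sensor_tx_rates": 400_000, "state": "up"}
converted = {
    key: (
        value / 8
        if key in SENSORS_RATES and isinstance(value, (int, float))
        else value
    )
    for key, value in data.items()
}
# 8 Mbit/s becomes 1 MB/s; non-rate keys pass through untouched
assert converted == {"sensor_rx_rates": 1_000_000.0, "sensor_tx_rates": 50_000.0, "state": "up"}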
@@ -7,7 +7,7 @@ import os
 import socket
 from typing import Any, cast

-from pyasuswrt import AsusWrtError
+from asusrouter import AsusRouterError
 import voluptuous as vol

 from homeassistant.components.device_tracker import (
@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
        try:
            await api.async_connect()

-        except (AsusWrtError, OSError):
+        except (AsusRouterError, OSError):
            _LOGGER.error(
                "Error connecting to the AsusWrt router at %s using protocol %s",
                host,
56 homeassistant/components/asuswrt/helpers.py (new file)
@@ -0,0 +1,56 @@
+"""Helpers for AsusWRT integration."""
+
+from __future__ import annotations
+
+from typing import Any, TypeVar
+
+T = TypeVar("T", dict[str, Any], list[Any], None)
+
+TRANSLATION_MAP = {
+    "wan_rx": "sensor_rx_bytes",
+    "wan_tx": "sensor_tx_bytes",
+    "total_usage": "cpu_total_usage",
+    "usage": "mem_usage_perc",
+    "free": "mem_free",
+    "used": "mem_used",
+    "wan_rx_speed": "sensor_rx_rates",
+    "wan_tx_speed": "sensor_tx_rates",
+    "2ghz": "2.4GHz",
+    "5ghz": "5.0GHz",
+    "5ghz2": "5.0GHz_2",
+    "6ghz": "6.0GHz",
+    "cpu": "CPU",
+    "datetime": "sensor_last_boot",
+    "uptime": "sensor_uptime",
+    **{f"{num}_usage": f"cpu{num}_usage" for num in range(1, 9)},
+    **{f"load_avg_{load}": f"sensor_load_avg{load}" for load in ("1", "5", "15")},
+}
+
+
+def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
+    """Cleans dictionary from None values.
+
+    The `state` key is always preserved regardless of its value.
+    """
+
+    return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
+
+
+def translate_to_legacy(raw: T) -> T:
+    """Translate raw data to legacy format for dicts and lists."""
+
+    if raw is None:
+        return None
+
+    if isinstance(raw, dict):
+        return {TRANSLATION_MAP.get(k, k): v for k, v in raw.items()}
+
+    if isinstance(raw, list):
+        return [
+            TRANSLATION_MAP[item]
+            if isinstance(item, str) and item in TRANSLATION_MAP
+            else item
+            for item in raw
+        ]
+
+    return raw
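Fed a sample payload (keys invented for illustration), the two helpers above compose like this when run below the module:

raw = {"usage": 51.2, "free": None, "wan_state": None, "2ghz": "on"}
cleaned = clean_dict(raw)  # "free" is dropped; "*state" keys survive even when None
assert cleaned == {"usage": 51.2, "wan_state": None, "2ghz": "on"}
assert translate_to_legacy(cleaned) == {"mem_usage_perc": 51.2, "wan_state": None, "2.4GHz": "on"}
assert translate_to_legacy(["wan_rx", "unknown"]) == ["sensor_rx_bytes", "unknown"]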
@@ -1,11 +1,11 @@
 {
   "domain": "asuswrt",
   "name": "ASUSWRT",
-  "codeowners": ["@kennedyshead", "@ollo69"],
+  "codeowners": ["@kennedyshead", "@ollo69", "@Vaskivskyi"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/asuswrt",
   "integration_type": "hub",
   "iot_class": "local_polling",
-  "loggers": ["aioasuswrt", "asyncssh"],
-  "requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.21"]
+  "loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
+  "requirements": ["aioasuswrt==1.4.0", "asusrouter==1.20.0"]
 }
@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
 import logging
 from typing import TYPE_CHECKING, Any

-from pyasuswrt import AsusWrtError
+from asusrouter import AsusRouterError

 from homeassistant.components.device_tracker import (
     CONF_CONSIDER_HOME,
@@ -229,7 +229,7 @@ class AsusWrtRouter:
         """Set up a AsusWrt router."""
         try:
             await self._api.async_connect()
-        except (AsusWrtError, OSError) as exc:
+        except (AsusRouterError, OSError) as exc:
             raise ConfigEntryNotReady from exc
         if not self._api.is_connected:
             raise ConfigEntryNotReady
@@ -284,7 +284,7 @@ class AsusWrtRouter:
         _LOGGER.debug("Checking devices for ASUS router %s", self.host)
         try:
             wrt_devices = await self._api.async_get_connected_devices()
-        except (OSError, AsusWrtError) as exc:
+        except (OSError, AsusRouterError) as exc:
             if not self._connect_error:
                 self._connect_error = True
                 _LOGGER.error(
@@ -6,18 +6,21 @@ from pathlib import Path
 from typing import cast

 from aiohttp import ClientResponseError
-from yalexs.const import Brand
 from yalexs.exceptions import AugustApiAIOHTTPError
 from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
 from yalexs.manager.gateway import Config as YaleXSConfig

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import device_registry as dr, issue_registry as ir
+from homeassistant.helpers import (
+    config_entry_oauth2_flow,
+    device_registry as dr,
+    issue_registry as ir,
+)

-from .const import DOMAIN, PLATFORMS
+from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
 from .data import AugustData
 from .gateway import AugustGateway
 from .util import async_create_august_clientsession
@@ -25,30 +28,21 @@ from .util import async_create_august_clientsession
 type AugustConfigEntry = ConfigEntry[AugustData]


-@callback
-def _async_create_yale_brand_migration_issue(
-    hass: HomeAssistant, entry: AugustConfigEntry
-) -> None:
-    """Create an issue for a brand migration."""
-    ir.async_create_issue(
-        hass,
-        DOMAIN,
-        "yale_brand_migration",
-        breaks_in_ha_version="2024.9",
-        learn_more_url="https://www.home-assistant.io/integrations/yale",
-        translation_key="yale_brand_migration",
-        is_fixable=False,
-        severity=ir.IssueSeverity.CRITICAL,
-        translation_placeholders={
-            "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale"
-        },
-    )
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool:
     """Set up August from a config entry."""
+    # Check if this is a legacy config entry that needs migration to OAuth
+    if "auth_implementation" not in entry.data:
+        # This is a legacy entry using username/password, trigger reauth
+        raise ConfigEntryAuthFailed("Migration to OAuth required")
+
     session = async_create_august_clientsession(hass)
-    august_gateway = AugustGateway(Path(hass.config.config_dir), session)
+    implementation = (
+        await config_entry_oauth2_flow.async_get_config_entry_implementation(
+            hass, entry
+        )
+    )
+    oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
+    august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
     try:
         await async_setup_august(hass, entry, august_gateway)
     except (RequireValidation, InvalidAuth) as err:
@@ -76,9 +70,7 @@ async def async_setup_august(
 ) -> None:
     """Set up the August component."""
     config = cast(YaleXSConfig, entry.data)
-    await august_gateway.async_setup(config)
-    if august_gateway.api.brand == Brand.YALE_HOME:
-        _async_create_yale_brand_migration_issue(hass, entry)
+    await august_gateway.async_setup({**config, "brand": DEFAULT_AUGUST_BRAND})
     await august_gateway.async_authenticate()
     await august_gateway.async_refresh_access_token_if_needed()
     data = entry.runtime_data = AugustData(hass, august_gateway)
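The merge pins the brand because later keys win during dict unpacking, so the explicit value overrides anything already stored in the entry. A two-line illustration with plain strings standing in for the Brand enum values:

config = {"login_method": "email", "brand": "yale_home"}
merged = {**config, "brand": "yale_august"}  # the explicit key overrides the unpacked one
assert merged == {"login_method": "email", "brand": "yale_august"}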
15 homeassistant/components/august/application_credentials.py (new file)
@@ -0,0 +1,15 @@
+"""application_credentials platform for the august integration."""
+
+from homeassistant.components.application_credentials import AuthorizationServer
+from homeassistant.core import HomeAssistant
+
+OAUTH2_AUTHORIZE = "https://auth.august.com/authorization"
+OAUTH2_TOKEN = "https://auth.august.com/access_token"
+
+
+async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
+    """Return authorization server."""
+    return AuthorizationServer(
+        authorize_url=OAUTH2_AUTHORIZE,
+        token_url=OAUTH2_TOKEN,
+    )
@@ -1,284 +1,86 @@
 """Config flow for August integration."""

 from collections.abc import Mapping
-from dataclasses import dataclass
 import logging
-from pathlib import Path
 from typing import Any

-import aiohttp
-import voluptuous as vol
-from yalexs.authenticator_common import ValidationResult
-from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand
-from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
+import jwt

-from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
-from homeassistant.core import callback
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
+from homeassistant.helpers import config_entry_oauth2_flow

-from .const import (
-    CONF_ACCESS_TOKEN_CACHE_FILE,
-    CONF_BRAND,
-    CONF_LOGIN_METHOD,
-    DEFAULT_LOGIN_METHOD,
-    DOMAIN,
-    LOGIN_METHODS,
-    VERIFICATION_CODE_KEY,
-)
-from .gateway import AugustGateway
-from .util import async_create_august_clientsession
-
-# The Yale Home Brand is not supported by the August integration
-# anymore and should migrate to the Yale integration
-AVAILABLE_BRANDS = BRANDS_WITHOUT_OAUTH.copy()
-del AVAILABLE_BRANDS[Brand.YALE_HOME]
+from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)


-async def async_validate_input(
-    data: dict[str, Any], august_gateway: AugustGateway
-) -> dict[str, Any]:
-    """Validate the user input allows us to connect.
-
-    Data has the keys from DATA_SCHEMA with values provided by the user.
-
-    Request configuration steps from the user.
-    """
-    assert august_gateway.authenticator is not None
-    authenticator = august_gateway.authenticator
-    if (code := data.get(VERIFICATION_CODE_KEY)) is not None:
-        result = await authenticator.async_validate_verification_code(code)
-        _LOGGER.debug("Verification code validation: %s", result)
-        if result != ValidationResult.VALIDATED:
-            raise RequireValidation
-
-    try:
-        await august_gateway.async_authenticate()
-    except RequireValidation:
-        _LOGGER.debug(
-            "Requesting new verification code for %s via %s",
-            data.get(CONF_USERNAME),
-            data.get(CONF_LOGIN_METHOD),
-        )
-        if code is None:
-            await august_gateway.authenticator.async_send_verification_code()
-        raise
-
-    return {
-        "title": data.get(CONF_USERNAME),
-        "data": august_gateway.config_entry(),
-    }
-
-
-@dataclass(slots=True)
-class ValidateResult:
-    """Result from validation."""
-
-    validation_required: bool
-    info: dict[str, Any]
-    errors: dict[str, str]
-    description_placeholders: dict[str, str]
-
-
-class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
+class AugustConfigFlow(
+    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
+):
     """Handle a config flow for August."""

     VERSION = 1
+    DOMAIN = DOMAIN

-    def __init__(self) -> None:
-        """Store an AugustGateway()."""
-        self._august_gateway: AugustGateway | None = None
-        self._aiohttp_session: aiohttp.ClientSession | None = None
-        self._user_auth_details: dict[str, Any] = {}
-        self._needs_reset = True
-        super().__init__()
-
-    async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle the initial step."""
-        return await self.async_step_user_validate()
-
-    async def async_step_user_validate(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle authentication."""
-        errors: dict[str, str] = {}
-        description_placeholders: dict[str, str] = {}
-        if user_input is not None:
-            self._user_auth_details.update(user_input)
-            validate_result = await self._async_auth_or_validate()
-            description_placeholders = validate_result.description_placeholders
-            if validate_result.validation_required:
-                return await self.async_step_validation()
-            if not (errors := validate_result.errors):
-                return await self._async_update_or_create_entry(validate_result.info)
-
-        return self.async_show_form(
-            step_id="user_validate",
-            data_schema=vol.Schema(
-                {
-                    vol.Required(
-                        CONF_BRAND,
-                        default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
-                    ): vol.In(AVAILABLE_BRANDS),
-                    vol.Required(
-                        CONF_LOGIN_METHOD,
-                        default=self._user_auth_details.get(
-                            CONF_LOGIN_METHOD, DEFAULT_LOGIN_METHOD
-                        ),
-                    ): vol.In(LOGIN_METHODS),
-                    vol.Required(
-                        CONF_USERNAME,
-                        default=self._user_auth_details.get(CONF_USERNAME),
-                    ): str,
-                    vol.Required(CONF_PASSWORD): str,
-                }
-            ),
-            errors=errors,
-            description_placeholders=description_placeholders,
-        )
-
-    async def async_step_validation(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle validation (2fa) step."""
-        if user_input:
-            if self.source == SOURCE_REAUTH:
-                return await self.async_step_reauth_validate(user_input)
-            return await self.async_step_user_validate(user_input)
-
-        previously_failed = VERIFICATION_CODE_KEY in self._user_auth_details
-        return self.async_show_form(
-            step_id="validation",
-            data_schema=vol.Schema(
-                {vol.Required(VERIFICATION_CODE_KEY): vol.All(str, vol.Strip)}
-            ),
-            errors={"base": "invalid_verification_code"} if previously_failed else None,
-            description_placeholders={
-                CONF_BRAND: self._user_auth_details[CONF_BRAND],
-                CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
-                CONF_LOGIN_METHOD: self._user_auth_details[CONF_LOGIN_METHOD],
-            },
-        )
-
-    @callback
-    def _async_get_gateway(self) -> AugustGateway:
-        """Set up the gateway."""
-        if self._august_gateway is not None:
-            return self._august_gateway
-        self._aiohttp_session = async_create_august_clientsession(self.hass)
-        self._august_gateway = AugustGateway(
-            Path(self.hass.config.config_dir), self._aiohttp_session
-        )
-        return self._august_gateway
-
-    @callback
-    def _async_shutdown_gateway(self) -> None:
-        """Shutdown the gateway."""
-        if self._aiohttp_session is not None:
-            self._aiohttp_session.detach()
-        self._august_gateway = None
+    @property
+    def logger(self) -> logging.Logger:
+        """Return logger."""
+        return _LOGGER

     async def async_step_reauth(
         self, entry_data: Mapping[str, Any]
     ) -> ConfigFlowResult:
         """Handle configuration by re-auth."""
-        self._user_auth_details = dict(entry_data)
-        return await self.async_step_reauth_validate()
+        return await self.async_step_user()

-    async def async_step_reauth_validate(
-        self, user_input: dict[str, Any] | None = None
+    def _async_decode_jwt(self, encoded: str) -> dict[str, Any]:
+        """Decode JWT token."""
+        return jwt.decode(
+            encoded,
+            "",
+            verify=False,
+            options={"verify_signature": False},
+            algorithms=["HS256"],
+        )
+
+    async def _async_handle_reauth(
+        self, data: dict, decoded: dict[str, Any], user_id: str
     ) -> ConfigFlowResult:
-        """Handle reauth and validation."""
-        errors: dict[str, str] = {}
-        description_placeholders: dict[str, str] = {}
-        if user_input is not None:
-            self._user_auth_details.update(user_input)
-            validate_result = await self._async_auth_or_validate()
-            description_placeholders = validate_result.description_placeholders
-            if validate_result.validation_required:
-                return await self.async_step_validation()
-            if not (errors := validate_result.errors):
-                return await self._async_update_or_create_entry(validate_result.info)
+        """Handle reauth flow."""
+        reauth_entry = self._get_reauth_entry()
+        assert reauth_entry.unique_id is not None
+        # Check if this is a migration from username (contains @) to user ID
+        if "@" not in reauth_entry.unique_id:
+            # This is a normal oauth reauth, enforce ID matching for security
+            await self.async_set_unique_id(user_id)
+            self._abort_if_unique_id_mismatch(reason="reauth_invalid_user")
+            return self.async_update_reload_and_abort(reauth_entry, data=data)

-        return self.async_show_form(
-            step_id="reauth_validate",
-            data_schema=vol.Schema(
-                {
-                    vol.Required(
-                        CONF_BRAND,
-                        default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
-                    ): vol.In(BRANDS_WITHOUT_OAUTH),
-                    vol.Required(CONF_PASSWORD): str,
-                }
-            ),
-            errors=errors,
-            description_placeholders=description_placeholders
-            | {
-                CONF_USERNAME: self._user_auth_details[CONF_USERNAME],
-            },
+        # This is a one-time migration from username to user ID
+        # Only validate if the account has emails
+        emails: list[str]
+        if emails := decoded.get("email", []):
+            # Validate that the email matches before allowing migration
+            email_to_check_lower = reauth_entry.unique_id.casefold()
+            if not any(email.casefold() == email_to_check_lower for email in emails):
+                # Email doesn't match - this is a different account
+                return self.async_abort(reason="reauth_invalid_user")
+
+        # Email matches or no emails on account, update with new unique ID
+        return self.async_update_reload_and_abort(
+            reauth_entry, data=data, unique_id=user_id
         )

-    async def _async_reset_access_token_cache_if_needed(
-        self, gateway: AugustGateway, username: str, access_token_cache_file: str | None
-    ) -> None:
-        """Reset the access token cache if needed."""
-        # We need to configure the access token cache file before we setup the gateway
-        # since we need to reset it if the brand changes BEFORE we setup the gateway
-        gateway.async_configure_access_token_cache_file(
-            username, access_token_cache_file
-        )
-        if self._needs_reset:
-            self._needs_reset = False
-            await gateway.async_reset_authentication()
+    async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
+        """Create an entry for the flow."""
+        # Decode JWT once
+        access_token = data["token"]["access_token"]
+        decoded = self._async_decode_jwt(access_token)
+        user_id = decoded["userId"]

-    async def _async_auth_or_validate(self) -> ValidateResult:
-        """Authenticate or validate."""
-        user_auth_details = self._user_auth_details
-        gateway = self._async_get_gateway()
-        assert gateway is not None
-        await self._async_reset_access_token_cache_if_needed(
-            gateway,
-            user_auth_details[CONF_USERNAME],
-            user_auth_details.get(CONF_ACCESS_TOKEN_CACHE_FILE),
-        )
-        await gateway.async_setup(user_auth_details)
+        if self.source == SOURCE_REAUTH:
+            return await self._async_handle_reauth(data, decoded, user_id)

-        errors: dict[str, str] = {}
-        info: dict[str, Any] = {}
-        description_placeholders: dict[str, str] = {}
-        validation_required = False
-
-        try:
-            info = await async_validate_input(user_auth_details, gateway)
-        except CannotConnect:
-            errors["base"] = "cannot_connect"
-        except InvalidAuth:
-            errors["base"] = "invalid_auth"
-        except RequireValidation:
-            validation_required = True
-        except Exception as ex:
-            _LOGGER.exception("Unexpected exception")
-            errors["base"] = "unhandled"
-            description_placeholders = {"error": str(ex)}
-
-        return ValidateResult(
-            validation_required, info, errors, description_placeholders
-        )
-
-    async def _async_update_or_create_entry(
-        self, info: dict[str, Any]
-    ) -> ConfigFlowResult:
-        """Update existing entry or create a new one."""
-        self._async_shutdown_gateway()
-
-        existing_entry = await self.async_set_unique_id(
-            self._user_auth_details[CONF_USERNAME]
-        )
-        if not existing_entry:
-            return self.async_create_entry(title=info["title"], data=info["data"])
-
-        return self.async_update_reload_and_abort(existing_entry, data=info["data"])
+        await self.async_set_unique_id(user_id)
+        self._abort_if_unique_id_configured()
+        return await super().async_oauth_create_entry(data)
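Skipping signature verification is tolerable here only because the token has just arrived over TLS from the token endpoint; the claims are read for identity bookkeeping, not for authorization. A self-contained PyJWT round trip of the same decode (the email claim shape is assumed for illustration; userId matches the code above):

import jwt

token = jwt.encode(
    {"userId": "user-123", "email": ["Person@Example.com"]}, "secret", algorithm="HS256"
)
decoded = jwt.decode(token, "", options={"verify_signature": False}, algorithms=["HS256"])
assert decoded["userId"] == "user-123"
# the migration check casefolds both sides before comparing
assert any(e.casefold() == "person@example.com" for e in decoded["email"])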
@@ -1,5 +1,7 @@
 """Constants for August devices."""

+from yalexs.const import Brand
+
 from homeassistant.const import Platform

 DEFAULT_TIMEOUT = 25
@@ -9,6 +11,8 @@ CONF_BRAND = "brand"
 CONF_LOGIN_METHOD = "login_method"
 CONF_INSTALL_ID = "install_id"

+DEFAULT_AUGUST_BRAND = Brand.YALE_AUGUST
+
 VERIFICATION_CODE_KEY = "verification_code"

 NOTIFICATION_ID = "august_notification"
@@ -1,30 +1,43 @@
 """Handle August connection setup and authentication."""

-from typing import Any
+import logging
+from pathlib import Path

-from yalexs.const import DEFAULT_BRAND
+from aiohttp import ClientSession
+from yalexs.authenticator_common import Authentication, AuthenticationState
 from yalexs.manager.gateway import Gateway

-from homeassistant.const import CONF_USERNAME
+from homeassistant.helpers import config_entry_oauth2_flow

-from .const import (
-    CONF_ACCESS_TOKEN_CACHE_FILE,
-    CONF_BRAND,
-    CONF_INSTALL_ID,
-    CONF_LOGIN_METHOD,
-)
+_LOGGER = logging.getLogger(__name__)


 class AugustGateway(Gateway):
     """Handle the connection to August."""

-    def config_entry(self) -> dict[str, Any]:
-        """Config entry."""
-        assert self._config is not None
-        return {
-            CONF_BRAND: self._config.get(CONF_BRAND, DEFAULT_BRAND),
-            CONF_LOGIN_METHOD: self._config[CONF_LOGIN_METHOD],
-            CONF_USERNAME: self._config[CONF_USERNAME],
-            CONF_INSTALL_ID: self._config.get(CONF_INSTALL_ID),
-            CONF_ACCESS_TOKEN_CACHE_FILE: self._access_token_cache_file,
-        }
+    def __init__(
+        self,
+        config_path: Path,
+        aiohttp_session: ClientSession,
+        oauth_session: config_entry_oauth2_flow.OAuth2Session,
+    ) -> None:
+        """Init the connection."""
+        super().__init__(config_path, aiohttp_session)
+        self._oauth_session = oauth_session
+
+    async def async_get_access_token(self) -> str:
+        """Get access token."""
+        await self._oauth_session.async_ensure_token_valid()
+        return self._oauth_session.token["access_token"]
+
+    async def async_refresh_access_token_if_needed(self) -> None:
+        """Refresh the access token if needed."""
+        await self._oauth_session.async_ensure_token_valid()
+
+    async def async_authenticate(self) -> Authentication:
+        """Authenticate with the details provided to setup."""
+        await self._oauth_session.async_ensure_token_valid()
+        self.authentication = Authentication(
+            AuthenticationState.AUTHENTICATED, None, None, None
+        )
+        return self.authentication
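All three methods follow one discipline: revalidate before every read so a stale token is never handed out. A generic stand-in of that pattern (every name here is invented, not an HA or yalexs API):

import time


class TokenSession:
    """Hypothetical OAuth session holding an expiring token."""

    def __init__(self) -> None:
        self.token = {"access_token": "t0", "expires_at": 0.0}

    async def async_ensure_token_valid(self) -> None:
        if self.token["expires_at"] <= time.time():
            # a real session would call the refresh endpoint here
            self.token = {"access_token": "t1", "expires_at": time.time() + 3600}


async def get_access_token(session: TokenSession) -> str:
    await session.async_ensure_token_valid()
    return session.token["access_token"]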
@@ -3,6 +3,7 @@
   "name": "August",
   "codeowners": ["@bdraco"],
   "config_flow": true,
+  "dependencies": ["application_credentials", "cloud"],
   "dhcp": [
     {
       "hostname": "connect",
@@ -28,5 +29,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.11.1", "yalexs-ble==3.1.2"]
+  "requirements": ["yalexs==8.12.0", "yalexs-ble==3.1.2"]
 }
@@ -6,42 +6,34 @@
       }
     },
     "config": {
-      "error": {
-        "unhandled": "Unhandled error: {error}",
-        "invalid_verification_code": "Invalid verification code",
-        "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-        "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
+      "step": {
+        "pick_implementation": {
+          "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
+          "data": {
+            "implementation": "[%key:common::config_flow::data::implementation%]"
+          },
+          "data_description": {
+            "implementation": "[%key:common::config_flow::description::implementation%]"
+          }
+        }
       },
       "abort": {
         "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
-        "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
+        "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
+        "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
+        "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
+        "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
+        "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
+        "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
+        "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
+        "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+        "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
+        "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
+        "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
+        "reauth_invalid_user": "Reauthenticate must use the same account."
       },
-      "step": {
-        "validation": {
-          "title": "Two-factor authentication",
-          "data": {
-            "verification_code": "Verification code"
-          },
-          "description": "Please check your {login_method} ({username}) and enter the verification code below. Codes may take a few minutes to arrive."
-        },
-        "user_validate": {
-          "description": "It is recommended to use the 'email' login method as some brands may not work with the 'phone' method. If the Login Method is 'email', Username is the email address. If the Login Method is 'phone', Username is the phone number in the format '+NNNNNNNNN'. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
-          "data": {
-            "brand": "Brand",
-            "login_method": "Login Method",
-            "username": "[%key:common::config_flow::data::username%]",
-            "password": "[%key:common::config_flow::data::password%]"
-          },
-          "title": "Set up an August account"
-        },
-        "reauth_validate": {
-          "description": "Choose the correct brand for your device, and enter the password for {username}. If you choose the wrong brand, you may be able to authenticate initially; however, you will not be able to operate devices. If you are unsure of the brand, create the integration again and try another brand.",
-          "data": {
-            "brand": "[%key:component::august::config::step::user_validate::data::brand%]",
-            "password": "[%key:common::config_flow::data::password%]"
-          },
-          "title": "Reauthenticate an August account"
-        }
+      "create_entry": {
+        "default": "[%key:common::config_flow::create_entry::authenticated%]"
       }
     },
     "entity": {
@@ -199,23 +199,19 @@ class AuthProvidersView(HomeAssistantView):
     )


-def _prepare_result_json(
-    result: AuthFlowResult,
-) -> AuthFlowResult:
-    """Convert result to JSON."""
+def _prepare_result_json(result: AuthFlowResult) -> dict[str, Any]:
+    """Convert result to JSON serializable dict."""
     if result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY:
-        data = result.copy()
-        data.pop("result")
-        data.pop("data")
-        return data
+        return {
+            key: val for key, val in result.items() if key not in ("result", "data")
+        }

     if result["type"] != data_entry_flow.FlowResultType.FORM:
-        return result
+        return result  # type: ignore[return-value]

-    data = result.copy()
-    if (schema := data["data_schema"]) is None:
-        data["data_schema"] = []  # type: ignore[typeddict-item]  # json result type
+    data = dict(result)
+    if (schema := result["data_schema"]) is None:
+        data["data_schema"] = []
     else:
         data["data_schema"] = voluptuous_serialize.convert(schema)

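The comprehension replaces copy-then-pop and is also robust when a key is absent, where dict.pop without a default would raise KeyError. In plain Python:

result = {"type": "create_entry", "title": "Demo", "result": object(), "data": {"secret": 1}}
safe = {key: val for key, val in result.items() if key not in ("result", "data")}
assert safe == {"type": "create_entry", "title": "Demo"}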
@@ -149,20 +149,16 @@ def websocket_depose_mfa(
     hass.async_create_task(async_depose(msg))


-def _prepare_result_json(
-    result: data_entry_flow.FlowResult,
-) -> data_entry_flow.FlowResult:
-    """Convert result to JSON."""
+def _prepare_result_json(result: data_entry_flow.FlowResult) -> dict[str, Any]:
+    """Convert result to JSON serializable dict."""
     if result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY:
-        return result.copy()
+        return dict(result)

     if result["type"] != data_entry_flow.FlowResultType.FORM:
-        return result
+        return result  # type: ignore[return-value]

-    data = result.copy()
-    if (schema := data["data_schema"]) is None:
-        data["data_schema"] = []  # type: ignore[typeddict-item]  # json result type
+    data = dict(result)
+    if (schema := result["data_schema"]) is None:
+        data["data_schema"] = []
     else:
         data["data_schema"] = voluptuous_serialize.convert(schema)

@@ -5,6 +5,7 @@ from __future__ import annotations
 from datetime import timedelta
 import logging

+API_ABS_HUMID = "abs_humid"
 API_CO2 = "carbon_dioxide"
 API_DEW_POINT = "dew_point"
 API_DUST = "dust"
@@ -18,6 +18,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_CONNECTIONS,
     ATTR_SW_VERSION,
+    CONCENTRATION_GRAMS_PER_CUBIC_METER,
     CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
@@ -33,6 +34,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .const import (
+    API_ABS_HUMID,
     API_CO2,
     API_DEW_POINT,
     API_DUST,
@@ -120,6 +122,14 @@ SENSOR_TYPES: tuple[AwairSensorEntityDescription, ...] = (
         state_class=SensorStateClass.MEASUREMENT,
         entity_registry_enabled_default=False,
     ),
+    AwairSensorEntityDescription(
+        key=API_ABS_HUMID,
+        device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
+        native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
+        unique_id_tag="absolute_humidity",
+        state_class=SensorStateClass.MEASUREMENT,
+        entity_registry_enabled_default=False,
+    ),
 )

 SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = (
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/azure_devops",
   "iot_class": "cloud_polling",
   "loggers": ["aioazuredevops"],
-  "requirements": ["aioazuredevops==2.2.1"]
+  "requirements": ["aioazuredevops==2.2.2"]
 }
@@ -127,7 +127,6 @@ class BackupConfigData:
             schedule=BackupSchedule(
                 days=days,
                 recurrence=ScheduleRecurrence(data["schedule"]["recurrence"]),
-                state=ScheduleState(data["schedule"].get("state", ScheduleState.NEVER)),
                 time=time,
             ),
         )
@@ -453,7 +452,6 @@ class StoredBackupSchedule(TypedDict):

     days: list[Day]
     recurrence: ScheduleRecurrence
-    state: ScheduleState
     time: str | None


@@ -462,7 +460,6 @@ class ScheduleParametersDict(TypedDict, total=False):

     days: list[Day]
     recurrence: ScheduleRecurrence
-    state: ScheduleState
     time: dt.time | None


@@ -486,32 +483,12 @@ class ScheduleRecurrence(StrEnum):
     CUSTOM_DAYS = "custom_days"


-class ScheduleState(StrEnum):
-    """Represent the schedule recurrence.
-
-    This is deprecated and can be remove in HA Core 2025.8.
-    """
-
-    NEVER = "never"
-    DAILY = "daily"
-    MONDAY = "mon"
-    TUESDAY = "tue"
-    WEDNESDAY = "wed"
-    THURSDAY = "thu"
-    FRIDAY = "fri"
-    SATURDAY = "sat"
-    SUNDAY = "sun"
-
-
 @dataclass(kw_only=True)
 class BackupSchedule:
     """Represent the backup schedule."""

     days: list[Day] = field(default_factory=list)
     recurrence: ScheduleRecurrence = ScheduleRecurrence.NEVER
-    # Although no longer used, state is kept for backwards compatibility.
-    # It can be removed in HA Core 2025.8.
-    state: ScheduleState = ScheduleState.NEVER
     time: dt.time | None = None
     cron_event: CronSim | None = field(init=False, default=None)
     next_automatic_backup: datetime | None = field(init=False, default=None)
@@ -610,7 +587,6 @@ class BackupSchedule:
         return StoredBackupSchedule(
             days=self.days,
             recurrence=self.recurrence,
-            state=self.state,
             time=self.time.isoformat() if self.time else None,
         )

@@ -331,9 +331,6 @@ async def handle_config_info(
|
|||||||
"""Send the stored backup config."""
|
"""Send the stored backup config."""
|
||||||
manager = hass.data[DATA_MANAGER]
|
manager = hass.data[DATA_MANAGER]
|
||||||
config = manager.config.data.to_dict()
|
config = manager.config.data.to_dict()
|
||||||
# Remove state from schedule, it's not needed in the frontend
|
|
||||||
# mypy doesn't like deleting from TypedDict, ignore it
|
|
||||||
del config["schedule"]["state"] # type: ignore[misc]
|
|
||||||
connection.send_result(
|
connection.send_result(
|
||||||
msg["id"],
|
msg["id"],
|
||||||
{
|
{
|
||||||
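
A side note on the deleted workaround: mypy rejects `del` on a required TypedDict key, which is why the old line carried `# type: ignore[misc]`; removing the key from the TypedDict makes the suppression unnecessary. A minimal illustration with generic names, not from this codebase:

```python
from typing import TypedDict


class Schedule(TypedDict):
    state: str
    time: str | None


class Config(TypedDict):
    schedule: Schedule


cfg: Config = {"schedule": {"state": "never", "time": None}}

# mypy rejects deleting a required TypedDict key:
#   error: Key "state" of TypedDict "Schedule" cannot be deleted  [misc]
del cfg["schedule"]["state"]  # type: ignore[misc]
```
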
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blue_current",
   "iot_class": "cloud_push",
   "loggers": ["bluecurrent_api"],
-  "requirements": ["bluecurrent-api==1.2.4"]
+  "requirements": ["bluecurrent-api==1.3.1"]
 }
@@ -388,12 +388,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     mode = BluetoothScanningMode.PASSIVE if passive else BluetoothScanningMode.ACTIVE
     scanner = HaScanner(mode, adapter, address)
     scanner.async_setup()
-    try:
-        await scanner.async_start()
-    except (RuntimeError, ScannerStartError) as err:
-        raise ConfigEntryNotReady(
-            f"{adapter_human_name(adapter, address)}: {err}"
-        ) from err
     adapters = await manager.async_get_bluetooth_adapters()
     details = adapters[adapter]
     if entry.title == address:
@@ -401,8 +395,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry, title=adapter_title(adapter, details)
         )
     slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS
+    # Register the scanner before starting so
+    # any raw advertisement data can be processed
     entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots))
     await async_update_device(hass, entry, adapter, details)
+    try:
+        await scanner.async_start()
+    except (RuntimeError, ScannerStartError) as err:
+        raise ConfigEntryNotReady(
+            f"{adapter_human_name(adapter, address)}: {err}"
+        ) from err
     entry.async_on_unload(entry.add_update_listener(async_update_listener))
     entry.async_on_unload(scanner.async_stop)
     return True
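
The reorder above follows a general subscribe-before-start rule: if a producer starts before its consumer is registered, the first events are silently dropped. A generic sketch of the idea, with simplified stand-ins rather than the bluetooth APIs:

```python
import asyncio


class Producer:
    """Emits events to whoever is registered at emit time."""

    def __init__(self) -> None:
        self.listeners: list = []

    async def start(self) -> None:
        # The very first event can fire immediately on start.
        for listener in self.listeners:
            listener("first advertisement")


async def setup() -> None:
    producer = Producer()
    received: list[str] = []

    # Correct order: register the consumer first...
    producer.listeners.append(received.append)
    # ...then start the producer, so nothing emitted at startup is lost.
    await producer.start()
    assert received == ["first advertisement"]


asyncio.run(setup())
```
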
@@ -235,10 +235,9 @@ class HomeAssistantBluetoothManager(BluetoothManager):
 
     def _async_save_scanner_history(self, scanner: BaseHaScanner) -> None:
         """Save the scanner history."""
-        if isinstance(scanner, BaseHaRemoteScanner):
-            self.storage.async_set_advertisement_history(
-                scanner.source, scanner.serialize_discovered_devices()
-            )
+        self.storage.async_set_advertisement_history(
+            scanner.source, scanner.serialize_discovered_devices()
+        )
 
     def _async_unregister_scanner(
         self, scanner: BaseHaScanner, unregister: CALLBACK_TYPE
@@ -285,9 +284,8 @@ class HomeAssistantBluetoothManager(BluetoothManager):
         connection_slots: int | None = None,
     ) -> CALLBACK_TYPE:
         """Register a scanner."""
-        if isinstance(scanner, BaseHaRemoteScanner):
-            if history := self.storage.async_get_advertisement_history(scanner.source):
-                scanner.restore_discovered_devices(history)
+        if history := self.storage.async_get_advertisement_history(scanner.source):
+            scanner.restore_discovered_devices(history)
 
         unregister = super().async_register_scanner(scanner, connection_slots)
         return partial(self._async_unregister_scanner, scanner, unregister)

@@ -16,11 +16,11 @@
   "quality_scale": "internal",
   "requirements": [
     "bleak==1.0.1",
-    "bleak-retry-connector==4.0.0",
+    "bleak-retry-connector==4.3.0",
     "bluetooth-adapters==2.0.0",
     "bluetooth-auto-recovery==1.5.2",
     "bluetooth-data-tools==1.28.2",
     "dbus-fast==2.44.3",
-    "habluetooth==4.0.2"
+    "habluetooth==5.1.0"
   ]
 }
@@ -39,7 +39,13 @@ def async_setup(hass: HomeAssistant) -> None:
 def serialize_service_info(
     service_info: BluetoothServiceInfoBleak, time_diff: float
 ) -> dict[str, Any]:
-    """Serialize a BluetoothServiceInfoBleak object."""
+    """Serialize a BluetoothServiceInfoBleak object.
+
+    The raw field is included for:
+    1. Debugging - to see the actual advertisement packet
+    2. Data freshness - manufacturer_data and service_data are aggregated
+       across multiple advertisements, raw shows the latest packet only
+    """
     return {
         "name": service_info.name,
         "address": service_info.address,
@@ -57,6 +63,7 @@ def serialize_service_info(
         "connectable": service_info.connectable,
         "time": service_info.time + time_diff,
         "tx_power": service_info.tx_power,
+        "raw": service_info.raw.hex() if service_info.raw else None,
     }
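
`bytes.hex()` is used here because raw advertisement payloads are not JSON serializable; the hex string round-trips losslessly. A quick standalone illustration, not the integration's code:

```python
raw = bytes([0x02, 0x01, 0x06, 0x03, 0x03, 0xAA, 0xFE])  # sample BLE AD structure bytes

encoded = raw.hex()               # "0201060303aafe" - JSON-safe string
decoded = bytes.fromhex(encoded)  # lossless round-trip back to bytes

assert decoded == raw
print(encoded)
```
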
@@ -95,7 +95,7 @@
       "name": "Battery missing"
     },
     "panel_fault_ac_fail": {
-      "name": "AC Failure"
+      "name": "AC failure"
     },
     "panel_fault_parameter_crc_fail_in_pif": {
       "name": "CRC failure in panel configuration"
@@ -69,12 +69,7 @@ class SHCEntity(SHCBaseEntity):
             manufacturer=device.manufacturer,
             model=device.device_model,
             name=device.name,
-            via_device=(
-                DOMAIN,
-                device.parent_device_id
-                if device.parent_device_id is not None
-                else parent_id,
-            ),
+            via_device=(DOMAIN, device.root_device_id),
         )
         super().__init__(device=device, parent_id=parent_id, entry_id=entry_id)
 
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
   "iot_class": "local_push",
   "loggers": ["boschshcpy"],
-  "requirements": ["boschshcpy==0.2.91"],
+  "requirements": ["boschshcpy==0.2.107"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -53,8 +53,7 @@ async def async_setup_entry(
     assert unique_id is not None
 
     async_add_entities(
-        BraviaTVButton(coordinator, unique_id, config_entry.title, description)
-        for description in BUTTONS
+        BraviaTVButton(coordinator, unique_id, description) for description in BUTTONS
     )
@@ -67,11 +66,10 @@ class BraviaTVButton(BraviaTVEntity, ButtonEntity):
         self,
         coordinator: BraviaTVCoordinator,
         unique_id: str,
-        model: str,
         description: BraviaTVButtonDescription,
     ) -> None:
         """Initialize the button."""
-        super().__init__(coordinator, unique_id, model)
+        super().__init__(coordinator, unique_id)
         self._attr_unique_id = f"{unique_id}_{description.key}"
         self.entity_description = description
@@ -79,14 +79,16 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
 
         system_info = await self.client.get_system_info()
         cid = system_info[ATTR_CID].lower()
-        title = system_info[ATTR_MODEL]
-
         self.device_config[CONF_MAC] = system_info[ATTR_MAC]
 
         await self.async_set_unique_id(cid)
         self._abort_if_unique_id_configured()
 
-        return self.async_create_entry(title=title, data=self.device_config)
+        return self.async_create_entry(
+            title=f"{system_info['name']} {system_info[ATTR_MODEL]}",
+            data=self.device_config,
+        )
 
     async def async_reauth_device(self) -> ConfigFlowResult:
         """Reauthorize Bravia TV device from config."""
@@ -81,6 +81,7 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
         self.use_psk = config_entry.data.get(CONF_USE_PSK, False)
         self.client_id = config_entry.data.get(CONF_CLIENT_ID, LEGACY_CLIENT_ID)
         self.nickname = config_entry.data.get(CONF_NICKNAME, NICKNAME_PREFIX)
+        self.system_info: dict[str, str] = {}
         self.source: str | None = None
         self.source_list: list[str] = []
         self.source_map: dict[str, dict] = {}
@@ -150,6 +151,9 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
         self.is_on = power_status == "active"
         self.skipped_updates = 0
 
+        if not self.system_info:
+            self.system_info = await self.client.get_system_info()
+
         if self.is_on is False:
             return
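
Caching `system_info` on the first refresh avoids re-querying static device data on every poll. The lazy-initialization idiom in isolation, with a hypothetical client for illustration:

```python
import asyncio


class Coordinator:
    """Fetch static device info once, then reuse it on every poll."""

    def __init__(self, client) -> None:
        self.client = client
        self.system_info: dict[str, str] = {}

    async def async_update(self) -> None:
        # Static data: fetch only while the cache is still empty.
        if not self.system_info:
            self.system_info = await self.client.get_system_info()
        # ...dynamic polling continues here on every call...


class FakeClient:
    calls = 0

    async def get_system_info(self) -> dict[str, str]:
        type(self).calls += 1
        return {"model": "XR-55", "serial": "123"}


async def main() -> None:
    coordinator = Coordinator(FakeClient())
    await coordinator.async_update()
    await coordinator.async_update()
    assert FakeClient.calls == 1  # second poll reused the cache


asyncio.run(main())
```
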
@@ -12,23 +12,16 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]):
 
     _attr_has_entity_name = True
 
-    def __init__(
-        self,
-        coordinator: BraviaTVCoordinator,
-        unique_id: str,
-        model: str,
-    ) -> None:
+    def __init__(self, coordinator: BraviaTVCoordinator, unique_id: str) -> None:
         """Initialize the entity."""
         super().__init__(coordinator)
 
         self._attr_unique_id = unique_id
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, unique_id)},
+            connections={(CONNECTION_NETWORK_MAC, coordinator.system_info["macAddr"])},
             manufacturer=ATTR_MANUFACTURER,
-            model=model,
-            name=f"{ATTR_MANUFACTURER} {model}",
+            model_id=coordinator.system_info["model"],
+            hw_version=coordinator.system_info["generation"],
+            serial_number=coordinator.system_info["serial"],
         )
-        if coordinator.client.mac is not None:
-            self._attr_device_info["connections"] = {
-                (CONNECTION_NETWORK_MAC, coordinator.client.mac)
-            }
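
A caveat worth noting on the block above: `DeviceInfo` is now built by indexing `coordinator.system_info` directly, so entity creation assumes the coordinator's first refresh has already populated the cache (which `async_config_entry_first_refresh` guarantees before entities are added). A tiny sketch of why the ordering matters, with stand-in types rather than the integration's code:

```python
class Entity:
    def __init__(self, system_info: dict[str, str]) -> None:
        # Direct indexing fails fast if setup ordering is violated,
        # which is preferable to silently registering incomplete devices.
        self.connections = {("mac", system_info["macAddr"])}
        self.model_id = system_info["model"]


info = {"macAddr": "aa:bb:cc:dd:ee:ff", "model": "XR-55", "generation": "8.5", "serial": "123"}
entity = Entity(info)  # fine once the cache is populated
try:
    Entity({})         # simulating entity creation before the first refresh
except KeyError as err:
    print(f"missing key: {err}")
```
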
@@ -34,9 +34,7 @@ async def async_setup_entry(
     unique_id = config_entry.unique_id
     assert unique_id is not None
 
-    async_add_entities(
-        [BraviaTVMediaPlayer(coordinator, unique_id, config_entry.title)]
-    )
+    async_add_entities([BraviaTVMediaPlayer(coordinator, unique_id)])
 
 
 class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):

@@ -24,7 +24,7 @@ async def async_setup_entry(
     unique_id = config_entry.unique_id
     assert unique_id is not None
 
-    async_add_entities([BraviaTVRemote(coordinator, unique_id, config_entry.title)])
+    async_add_entities([BraviaTVRemote(coordinator, unique_id)])
 
 
 class BraviaTVRemote(BraviaTVEntity, RemoteEntity):
@@ -8,7 +8,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
-  "requirements": ["brother==5.0.0"],
+  "requirements": ["brother==5.0.1"],
   "zeroconf": [
     {
       "type": "_printer._tcp.local.",
@@ -2,7 +2,16 @@
 
 import dataclasses
 
-from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState
+from bsblan import (
+    BSBLAN,
+    BSBLANAuthError,
+    BSBLANConfig,
+    BSBLANConnectionError,
+    BSBLANError,
+    Device,
+    Info,
+    StaticState,
+)
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -13,9 +22,14 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
-from .const import CONF_PASSKEY
+from .const import CONF_PASSKEY, DOMAIN
 from .coordinator import BSBLanUpdateCoordinator
 
 PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
@@ -54,10 +68,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
     coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan)
     await coordinator.async_config_entry_first_refresh()
 
-    # Fetch all required data concurrently
-    device = await bsblan.device()
-    info = await bsblan.info()
-    static = await bsblan.static_values()
+    try:
+        # Fetch all required data sequentially
+        device = await bsblan.device()
+        info = await bsblan.info()
+        static = await bsblan.static_values()
+    except BSBLANConnectionError as err:
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="setup_connection_error",
+            translation_placeholders={"host": entry.data[CONF_HOST]},
+        ) from err
+    except BSBLANAuthError as err:
+        raise ConfigEntryAuthFailed(
+            translation_domain=DOMAIN,
+            translation_key="setup_auth_error",
+        ) from err
+    except BSBLANError as err:
+        raise ConfigEntryError(
+            translation_domain=DOMAIN,
+            translation_key="setup_general_error",
+        ) from err
 
     entry.runtime_data = BSBLanData(
         client=bsblan,
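
The mapping above follows Home Assistant's config-entry exception taxonomy: `ConfigEntryNotReady` triggers automatic setup retry with backoff, `ConfigEntryAuthFailed` starts a reauth flow, and `ConfigEntryError` fails setup permanently. A condensed sketch of the same pattern, assuming a Home Assistant environment and using hypothetical client and error names:

```python
from homeassistant.exceptions import (
    ConfigEntryAuthFailed,
    ConfigEntryError,
    ConfigEntryNotReady,
)


class ClientConnectionError(Exception): ...
class ClientAuthError(Exception): ...
class ClientError(Exception): ...


async def setup_entry(client) -> bool:
    """Map library errors onto the three config-entry outcomes."""
    try:
        await client.fetch_static_data()  # hypothetical client call
    except ClientConnectionError as err:
        # Transient: HA will retry setup later with backoff.
        raise ConfigEntryNotReady(str(err)) from err
    except ClientAuthError as err:
        # Credentials problem: HA prompts the user to reauthenticate.
        raise ConfigEntryAuthFailed(str(err)) from err
    except ClientError as err:
        # Anything else: fail setup without retrying.
        raise ConfigEntryError(str(err)) from err
    return True
```
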
@@ -41,6 +41,11 @@
           "passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
           "username": "[%key:common::config_flow::data::username%]",
           "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
+          "username": "[%key:component::bsblan::config::step::user::data_description::username%]",
+          "password": "[%key:component::bsblan::config::step::user::data_description::password%]"
         }
       }
     },
@@ -66,6 +71,15 @@
     },
     "set_operation_mode_error": {
       "message": "An error occurred while setting the operation mode"
+    },
+    "setup_connection_error": {
+      "message": "Failed to retrieve static device data from BSB-Lan device at {host}"
+    },
+    "setup_auth_error": {
+      "message": "Authentication failed while retrieving static device data"
+    },
+    "setup_general_error": {
+      "message": "An unknown error occurred while retrieving static device data"
     }
   },
   "entity": {
@@ -72,7 +72,7 @@ SENSOR_DESCRIPTIONS = {
         key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    # Conductivity (µS/cm)
+    # Conductivity (μS/cm)
    (
         BTHomeSensorDeviceClass.CONDUCTIVITY,
         Units.CONDUCTIVITY,
@@ -215,7 +215,7 @@ SENSOR_DESCRIPTIONS = {
         entity_category=EntityCategory.DIAGNOSTIC,
         entity_registry_enabled_default=False,
     ),
-    # PM10 (µg/m3)
+    # PM10 (μg/m3)
    (
         BTHomeSensorDeviceClass.PM10,
         Units.CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
@@ -225,7 +225,7 @@ SENSOR_DESCRIPTIONS = {
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    # PM2.5 (µg/m3)
+    # PM2.5 (μg/m3)
    (
         BTHomeSensorDeviceClass.PM25,
         Units.CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
@@ -318,7 +318,7 @@ SENSOR_DESCRIPTIONS = {
         key=str(BTHomeSensorDeviceClass.UV_INDEX),
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    # Volatile organic Compounds (VOC) (µg/m3)
+    # Volatile organic Compounds (VOC) (μg/m3)
    (
         BTHomeSensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
         Units.CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
@@ -25,7 +25,7 @@
   "services": {
     "press": {
       "name": "Press",
-      "description": "Press the button entity."
+      "description": "Presses a button entity."
     }
   }
 }
@@ -14,7 +14,7 @@
   "documentation": "https://www.home-assistant.io/integrations/cast",
   "iot_class": "local_polling",
   "loggers": ["casttube", "pychromecast"],
-  "requirements": ["PyChromecast==14.0.7"],
+  "requirements": ["PyChromecast==14.0.9"],
   "single_config_entry": true,
   "zeroconf": ["_googlecast._tcp.local."]
 }
@@ -255,7 +255,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     )
 
     entity_description: ClimateEntityDescription
-    _attr_current_humidity: int | None = None
+    _attr_current_humidity: float | None = None
     _attr_current_temperature: float | None = None
     _attr_fan_mode: str | None
     _attr_fan_modes: list[str] | None
@@ -6,12 +6,16 @@ import asyncio
 from collections.abc import Callable
 from contextlib import suppress
 from datetime import datetime, timedelta
-from http import HTTPStatus
 import logging
 from typing import TYPE_CHECKING, Any
 
 import aiohttp
-from hass_nabucasa import Cloud, cloud_api
+from hass_nabucasa import AlexaApiError, Cloud
+from hass_nabucasa.alexa_api import (
+    AlexaAccessTokenDetails,
+    AlexaApiNeedsRelinkError,
+    AlexaApiNoTokenError,
+)
 from yarl import URL
 
 from homeassistant.components import persistent_notification
@@ -146,7 +150,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
         self._cloud_user = cloud_user
         self._prefs = prefs
         self._cloud = cloud
-        self._token = None
+        self._token: str | None = None
         self._token_valid: datetime | None = None
         self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
         self._alexa_sync_unsub: Callable[[], None] | None = None
@@ -318,32 +322,31 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
 
     async def async_get_access_token(self) -> str | None:
         """Get an access token."""
+        details: AlexaAccessTokenDetails | None
         if self._token_valid is not None and self._token_valid > utcnow():
             return self._token
 
-        resp = await cloud_api.async_alexa_access_token(self._cloud)
-        body = await resp.json()
-
-        if resp.status == HTTPStatus.BAD_REQUEST:
-            if body["reason"] in ("RefreshTokenNotFound", "UnknownRegion"):
-                if self.should_report_state:
-                    persistent_notification.async_create(
-                        self.hass,
-                        (
-                            "There was an error reporting state to Alexa"
-                            f" ({body['reason']}). Please re-link your Alexa skill via"
-                            " the Alexa app to continue using it."
-                        ),
-                        "Alexa state reporting disabled",
-                        "cloud_alexa_report",
-                    )
-                raise alexa_errors.RequireRelink
-
-            raise alexa_errors.NoTokenAvailable
-
-        self._token = body["access_token"]
-        self._endpoint = body["event_endpoint"]
-        self._token_valid = utcnow() + timedelta(seconds=body["expires_in"])
+        try:
+            details = await self._cloud.alexa_api.access_token()
+        except AlexaApiNeedsRelinkError as exception:
+            if self.should_report_state:
+                persistent_notification.async_create(
+                    self.hass,
+                    (
+                        "There was an error reporting state to Alexa"
+                        f" ({exception.reason}). Please re-link your Alexa skill via"
+                        " the Alexa app to continue using it."
+                    ),
+                    "Alexa state reporting disabled",
+                    "cloud_alexa_report",
+                )
+            raise alexa_errors.RequireRelink from exception
+        except (AlexaApiNoTokenError, AlexaApiError) as exception:
+            raise alexa_errors.NoTokenAvailable from exception
+
+        self._token = details["access_token"]
+        self._endpoint = details["event_endpoint"]
+        self._token_valid = utcnow() + timedelta(seconds=details["expires_in"])
         return self._token
 
     async def _async_prefs_updated(self, prefs: CloudPreferences) -> None:
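
The refactor keeps the existing expiry-based token cache: a token is reused until `_token_valid`, then refreshed through the typed client. The caching idiom on its own, with generic names and standard library only:

```python
from datetime import datetime, timedelta, timezone


class TokenCache:
    """Reuse an access token until it expires."""

    def __init__(self, fetch) -> None:
        self._fetch = fetch  # callable returning (token, expires_in_seconds)
        self._token: str | None = None
        self._valid_until: datetime | None = None

    def get(self) -> str:
        now = datetime.now(timezone.utc)
        if self._valid_until is not None and self._valid_until > now:
            return self._token  # cache hit: no network round-trip
        token, expires_in = self._fetch()
        self._token = token
        self._valid_until = now + timedelta(seconds=expires_in)
        return token


cache = TokenCache(lambda: ("token-abc", 3600))
assert cache.get() == cache.get()  # second call served from cache
```
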
@@ -7,7 +7,7 @@ from http import HTTPStatus
 import logging
 from typing import TYPE_CHECKING, Any
 
-from hass_nabucasa import Cloud, cloud_api
+from hass_nabucasa import Cloud
 from hass_nabucasa.google_report_state import ErrorResponse
 
 from homeassistant.components.binary_sensor import BinarySensorDeviceClass
@@ -377,7 +377,7 @@ class CloudGoogleConfig(AbstractConfig):
             return HTTPStatus.OK
 
         async with self._sync_entities_lock:
-            resp = await cloud_api.async_google_actions_request_sync(self._cloud)
+            resp = await self._cloud.google_report_state.request_sync()
             return resp.status
 
     async def async_connect_agent_user(self, agent_user_id: str) -> None:

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==0.111.1"],
+  "requirements": ["hass-nabucasa==1.0.0"],
   "single_config_entry": true
 }
@@ -7,22 +7,18 @@ import logging
 
 from coinbase.rest import RESTClient
 from coinbase.rest.rest_base import HTTPError
-from coinbase.wallet.client import Client as LegacyClient
-from coinbase.wallet.error import AuthenticationError
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import entity_registry as er
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.util import Throttle
 
 from .const import (
     ACCOUNT_IS_VAULT,
     API_ACCOUNT_AMOUNT,
     API_ACCOUNT_AVALIABLE,
-    API_ACCOUNT_BALANCE,
     API_ACCOUNT_CURRENCY,
-    API_ACCOUNT_CURRENCY_CODE,
     API_ACCOUNT_HOLD,
     API_ACCOUNT_ID,
     API_ACCOUNT_NAME,
@@ -31,12 +27,9 @@ from .const import (
     API_DATA,
     API_RATES_CURRENCY,
     API_RESOURCE_TYPE,
-    API_TYPE_VAULT,
     API_V3_ACCOUNT_ID,
     API_V3_TYPE_VAULT,
-    CONF_CURRENCIES,
     CONF_EXCHANGE_BASE,
-    CONF_EXCHANGE_RATES,
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -51,9 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
     """Set up Coinbase from a config entry."""
 
     instance = await hass.async_add_executor_job(create_and_update_instance, entry)
-
-    entry.async_on_unload(entry.add_update_listener(update_listener))
-
     entry.runtime_data = instance
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -68,68 +58,28 @@ async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
 
 def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData:
     """Create and update a Coinbase Data instance."""
 
+    # Check if user is using deprecated v2 API credentials
     if "organizations" not in entry.data[CONF_API_KEY]:
-        client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
-        version = "v2"
-    else:
-        client = RESTClient(
-            api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
-        )
-        version = "v3"
+        # Trigger reauthentication to ask user for v3 credentials
+        raise ConfigEntryAuthFailed(
+            "Your Coinbase API key appears to be for the deprecated v2 API. "
+            "Please reconfigure with a new API key created for the v3 API. "
+            "Visit https://www.coinbase.com/developer-platform to create new credentials."
+        )
+
+    client = RESTClient(
+        api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
+    )
     base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
-    instance = CoinbaseData(client, base_rate, version)
+    instance = CoinbaseData(client, base_rate)
     instance.update()
     return instance
 
 
-async def update_listener(
-    hass: HomeAssistant, config_entry: CoinbaseConfigEntry
-) -> None:
-    """Handle options update."""
-
-    await hass.config_entries.async_reload(config_entry.entry_id)
-
-    registry = er.async_get(hass)
-    entities = er.async_entries_for_config_entry(registry, config_entry.entry_id)
-
-    # Remove orphaned entities
-    for entity in entities:
-        currency = entity.unique_id.split("-")[-1]
-        if (
-            "xe" in entity.unique_id
-            and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
-        ) or (
-            "wallet" in entity.unique_id
-            and currency not in config_entry.options.get(CONF_CURRENCIES, [])
-        ):
-            registry.async_remove(entity.entity_id)
-
-
-def get_accounts(client, version):
+def get_accounts(client):
     """Handle paginated accounts."""
     response = client.get_accounts()
-    if version == "v2":
-        accounts = response[API_DATA]
-        next_starting_after = response.pagination.next_starting_after
-
-        while next_starting_after:
-            response = client.get_accounts(starting_after=next_starting_after)
-            accounts += response[API_DATA]
-            next_starting_after = response.pagination.next_starting_after
-
-        return [
-            {
-                API_ACCOUNT_ID: account[API_ACCOUNT_ID],
-                API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
-                API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
-                    API_ACCOUNT_CURRENCY_CODE
-                ],
-                API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
-                ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
-            }
-            for account in accounts
-        ]
-
     accounts = response[API_ACCOUNTS]
     while response["has_next"]:
         response = client.get_accounts(cursor=response["cursor"])
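
The surviving v3 path uses cursor-based pagination: keep requesting with the returned cursor until `has_next` is false. The loop shape in isolation, with a fake client for illustration:

```python
def get_all_items(client) -> list[dict]:
    """Collect every page of a cursor-paginated endpoint."""
    response = client.get_accounts()
    items = list(response["accounts"])
    while response["has_next"]:
        # Each response carries the cursor for the next page.
        response = client.get_accounts(cursor=response["cursor"])
        items += response["accounts"]
    return items


class FakeClient:
    """Two-page fake of a cursor-paginated API."""

    def get_accounts(self, cursor: str | None = None) -> dict:
        if cursor is None:
            return {"accounts": [{"id": 1}], "has_next": True, "cursor": "p2"}
        return {"accounts": [{"id": 2}], "has_next": False, "cursor": ""}


assert [a["id"] for a in get_all_items(FakeClient())] == [1, 2]
```
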
@@ -153,37 +103,28 @@ def get_accounts(client, version):
 class CoinbaseData:
     """Get the latest data and update the states."""
 
-    def __init__(self, client, exchange_base, version):
+    def __init__(self, client, exchange_base):
         """Init the coinbase data object."""
 
         self.client = client
         self.accounts = None
         self.exchange_base = exchange_base
         self.exchange_rates = None
-        if version == "v2":
-            self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
-        else:
-            self.user_id = (
-                "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
-            )
-        self.api_version = version
+        self.user_id = (
+            "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
+        )
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
         """Get the latest data from coinbase."""
 
         try:
-            self.accounts = get_accounts(self.client, self.api_version)
-            if self.api_version == "v2":
-                self.exchange_rates = self.client.get_exchange_rates(
-                    currency=self.exchange_base
-                )
-            else:
-                self.exchange_rates = self.client.get(
-                    "/v2/exchange-rates",
-                    params={API_RATES_CURRENCY: self.exchange_base},
-                )[API_DATA]
-        except (AuthenticationError, HTTPError) as coinbase_error:
+            self.accounts = get_accounts(self.client)
+            self.exchange_rates = self.client.get(
+                "/v2/exchange-rates",
+                params={API_RATES_CURRENCY: self.exchange_base},
+            )[API_DATA]
+        except HTTPError as coinbase_error:
             _LOGGER.error(
                 "Authentication error connecting to coinbase: %s", coinbase_error
             )
@@ -2,17 +2,20 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import Any
 
 from coinbase.rest import RESTClient
 from coinbase.rest.rest_base import HTTPError
-from coinbase.wallet.client import Client as LegacyClient
-from coinbase.wallet.error import AuthenticationError
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
-from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
+from homeassistant.config_entries import (
+    ConfigFlow,
+    ConfigFlowResult,
+    OptionsFlowWithReload,
+)
+from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
@@ -45,9 +48,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 
 def get_user_from_client(api_key, api_token):
     """Get the user name from Coinbase API credentials."""
-    if "organizations" not in api_key:
-        client = LegacyClient(api_key, api_token)
-        return client.get_current_user()["name"]
     client = RESTClient(api_key=api_key, api_secret=api_token)
     return client.get_portfolios()["portfolios"][0]["name"]
 
@@ -59,7 +59,7 @@ async def validate_api(hass: HomeAssistant, data):
         user = await hass.async_add_executor_job(
             get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
         )
-    except (AuthenticationError, HTTPError) as error:
+    except HTTPError as error:
         if "api key" in str(error) or " 401 Client Error" in str(error):
             _LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
             raise InvalidKey from error
@@ -74,8 +74,8 @@ async def validate_api(hass: HomeAssistant, data):
         raise InvalidAuth from error
     except ConnectionError as error:
         raise CannotConnect from error
-    api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
-    return {"title": user, "api_version": api_version}
+    return {"title": user}
 
 
 async def validate_options(
@@ -85,20 +85,17 @@ async def validate_options(
 
     client = config_entry.runtime_data.client
 
-    accounts = await hass.async_add_executor_job(
-        get_accounts, client, config_entry.data.get("api_version", "v2")
-    )
+    accounts = await hass.async_add_executor_job(get_accounts, client)
 
     accounts_currencies = [
         account[API_ACCOUNT_CURRENCY]
         for account in accounts
         if not account[ACCOUNT_IS_VAULT]
     ]
-    if config_entry.data.get("api_version", "v2") == "v2":
-        available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
-    else:
-        resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
-        available_rates = resp[API_DATA]
+    resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
+    available_rates = resp[API_DATA]
 
     if CONF_CURRENCIES in options:
         for currency in options[CONF_CURRENCIES]:
             if currency not in accounts_currencies:
@@ -117,6 +114,8 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
+    reauth_entry: CoinbaseConfigEntry
+
     async def async_step_user(
         self, user_input: dict[str, str] | None = None
     ) -> ConfigFlowResult:
@@ -143,12 +142,63 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
             _LOGGER.exception("Unexpected exception")
             errors["base"] = "unknown"
         else:
-            user_input[CONF_API_VERSION] = info["api_version"]
             return self.async_create_entry(title=info["title"], data=user_input)
         return self.async_show_form(
             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )
 
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle reauthentication flow."""
+        self.reauth_entry = self._get_reauth_entry()
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, str] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reauthentication confirmation."""
+        errors: dict[str, str] = {}
+
+        if user_input is None:
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                data_schema=STEP_USER_DATA_SCHEMA,
+                description_placeholders={
+                    "account_name": self.reauth_entry.title,
+                },
+                errors=errors,
+            )
+
+        try:
+            await validate_api(self.hass, user_input)
+        except CannotConnect:
+            errors["base"] = "cannot_connect"
+        except InvalidKey:
+            errors["base"] = "invalid_auth_key"
+        except InvalidSecret:
+            errors["base"] = "invalid_auth_secret"
+        except InvalidAuth:
+            errors["base"] = "invalid_auth"
+        except Exception:
+            _LOGGER.exception("Unexpected exception")
+            errors["base"] = "unknown"
+        else:
+            return self.async_update_reload_and_abort(
+                self.reauth_entry,
+                data_updates=user_input,
+                reason="reauth_successful",
+            )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=STEP_USER_DATA_SCHEMA,
+            description_placeholders={
+                "account_name": self.reauth_entry.title,
+            },
+            errors=errors,
+        )
+
     @staticmethod
     @callback
     def async_get_options_flow(
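
How the pieces connect: the `ConfigEntryAuthFailed` raised in `create_and_update_instance` makes Home Assistant start this reauth flow, which re-validates the credentials and writes them back with `async_update_reload_and_abort`. A skeletal, runnable model of the two-step pattern, where the plain dicts stand in for Home Assistant's flow results:

```python
import asyncio
from collections.abc import Mapping
from typing import Any


class ReauthFlowSketch:
    """Shape of a config-flow reauth, with stubbed-out HA plumbing."""

    def __init__(self, entry: dict[str, Any]) -> None:
        self.entry = entry  # stand-in for the config entry being fixed

    async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> dict:
        # Entered when setup raises ConfigEntryAuthFailed.
        return await self.async_step_reauth_confirm(None)

    async def async_step_reauth_confirm(self, user_input: dict | None) -> dict:
        if user_input is None:
            return {"type": "form", "step_id": "reauth_confirm"}  # show the form
        # On submit: validate, then merge the new credentials and reload.
        self.entry.update(user_input)
        return {"type": "abort", "reason": "reauth_successful"}


async def main() -> None:
    flow = ReauthFlowSketch({"api_key": "old"})
    assert (await flow.async_step_reauth({}))["type"] == "form"
    result = await flow.async_step_reauth_confirm({"api_key": "new"})
    assert result == {"type": "abort", "reason": "reauth_successful"}
    assert flow.entry["api_key"] == "new"


asyncio.run(main())
```
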
@@ -158,7 +208,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
         return OptionsFlowHandler()
 
 
-class OptionsFlowHandler(OptionsFlow):
+class OptionsFlowHandler(OptionsFlowWithReload):
     """Handle a option flow for Coinbase."""
 
     async def async_step_init(

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/coinbase",
   "iot_class": "cloud_polling",
   "loggers": ["coinbase"],
-  "requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
+  "requirements": ["coinbase-advanced-py==1.2.2"]
 }
@@ -6,6 +6,7 @@ import logging
 
 from homeassistant.components.sensor import SensorEntity, SensorStateClass
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
@@ -27,7 +28,6 @@ from .const import (
 _LOGGER = logging.getLogger(__name__)
 
 ATTR_NATIVE_BALANCE = "Balance in native currency"
-ATTR_API_VERSION = "API Version"
 
 CURRENCY_ICONS = {
     "BTC": "mdi:currency-btc",
@@ -69,11 +69,26 @@ async def async_setup_entry(
         CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT
     )
 
+    # Remove orphaned entities
+    registry = er.async_get(hass)
+    existing_entities = er.async_entries_for_config_entry(
+        registry, config_entry.entry_id
+    )
+    for entity in existing_entities:
+        currency = entity.unique_id.split("-")[-1]
+        if (
+            "xe" in entity.unique_id
+            and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
+        ) or (
+            "wallet" in entity.unique_id
+            and currency not in config_entry.options.get(CONF_CURRENCIES, [])
+        ):
+            registry.async_remove(entity.entity_id)
+
     for currency in desired_currencies:
         _LOGGER.debug(
-            "Attempting to set up %s account sensor with %s API",
+            "Attempting to set up %s account sensor",
             currency,
-            instance.api_version,
         )
         if currency not in provided_currencies:
             _LOGGER.warning(
@@ -89,9 +104,8 @@ async def async_setup_entry(
     if CONF_EXCHANGE_RATES in config_entry.options:
         for rate in config_entry.options[CONF_EXCHANGE_RATES]:
             _LOGGER.debug(
-                "Attempting to set up %s account sensor with %s API",
+                "Attempting to set up %s exchange rate sensor",
                 rate,
-                instance.api_version,
             )
             entities.append(
                 ExchangeRateSensor(
@@ -146,15 +160,13 @@ class AccountSensor(SensorEntity):
         """Return the state attributes of the sensor."""
         return {
             ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
-            ATTR_API_VERSION: self._coinbase_data.api_version,
         }
 
     def update(self) -> None:
         """Get the latest state of the sensor."""
         _LOGGER.debug(
-            "Updating %s account sensor with %s API",
+            "Updating %s account sensor",
             self._currency,
-            self._coinbase_data.api_version,
         )
         self._coinbase_data.update()
         for account in self._coinbase_data.accounts:
@@ -210,9 +222,8 @@ class ExchangeRateSensor(SensorEntity):
     def update(self) -> None:
         """Get the latest state of the sensor."""
         _LOGGER.debug(
-            "Updating %s rate sensor with %s API",
+            "Updating %s rate sensor",
             self._currency,
-            self._coinbase_data.api_version,
         )
         self._coinbase_data.update()
         self._attr_native_value = round(
@@ -8,6 +8,14 @@
         "api_key": "[%key:common::config_flow::data::api_key%]",
         "api_token": "API secret"
       }
+    },
+    "reauth_confirm": {
+      "title": "Update Coinbase API credentials",
+      "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
+      "data": {
+        "api_key": "[%key:common::config_flow::data::api_key%]",
+        "api_token": "API secret"
+      }
     }
   },
   "error": {
@@ -18,7 +26,8 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "Successfully updated credentials"
     }
   },
   "options": {
@@ -137,20 +137,16 @@ class ConfigManagerEntryResourceReloadView(HomeAssistantView):
 
 def _prepare_config_flow_result_json(
     result: data_entry_flow.FlowResult,
-    prepare_result_json: Callable[
-        [data_entry_flow.FlowResult], data_entry_flow.FlowResult
-    ],
-) -> data_entry_flow.FlowResult:
+    prepare_result_json: Callable[[data_entry_flow.FlowResult], dict[str, Any]],
+) -> dict[str, Any]:
     """Convert result to JSON."""
     if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
         return prepare_result_json(result)
 
-    data = result.copy()
-    entry: config_entries.ConfigEntry = data["result"]  # type: ignore[typeddict-item]
+    data = {key: val for key, val in result.items() if key not in ("data", "context")}
+    entry: config_entries.ConfigEntry = result["result"]  # type: ignore[typeddict-item]
     # We overwrite the ConfigEntry object with its json representation.
-    data["result"] = entry.as_json_fragment  # type: ignore[typeddict-unknown-key]
-    data.pop("data")
-    data.pop("context")
+    data["result"] = entry.as_json_fragment
     return data
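
The replacement swaps copy-then-pop for a single comprehension, which also changes the static type: filtering a TypedDict through `.items()` yields a plain `dict[str, Any]`, so the `type: ignore` comments on mutation can go. The idiom in isolation:

```python
from typing import Any

result: dict[str, Any] = {
    "type": "create_entry",
    "result": object(),
    "data": {"secret": 1},
    "context": {"source": "user"},
}

# One pass, no mutation of the original, and the excluded keys never appear:
data = {key: val for key, val in result.items() if key not in ("data", "context")}

assert "data" not in data and "context" not in data
assert result["data"] == {"secret": 1}  # source dict is untouched
```
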
@@ -204,8 +200,8 @@ class ConfigManagerFlowIndexView(
 
     def _prepare_result_json(
         self, result: data_entry_flow.FlowResult
-    ) -> data_entry_flow.FlowResult:
-        """Convert result to JSON."""
+    ) -> dict[str, Any]:
+        """Convert result to JSON serializable dict."""
         return _prepare_config_flow_result_json(result, super()._prepare_result_json)
@@ -229,8 +225,8 @@ class ConfigManagerFlowResourceView(
 
     def _prepare_result_json(
         self, result: data_entry_flow.FlowResult
-    ) -> data_entry_flow.FlowResult:
-        """Convert result to JSON."""
+    ) -> dict[str, Any]:
+        """Convert result to JSON serializable dict."""
         return _prepare_config_flow_result_json(result, super()._prepare_result_json)
@@ -40,6 +40,7 @@ from .chat_log import (
     ConverseError,
     SystemContent,
     ToolResultContent,
+    ToolResultContentDeltaDict,
     UserContent,
     async_get_chat_log,
 )
@@ -79,6 +80,7 @@ __all__ = [
     "ConverseError",
     "SystemContent",
     "ToolResultContent",
+    "ToolResultContentDeltaDict",
     "UserContent",
     "async_conversation_trace_append",
     "async_converse",
@@ -117,7 +119,7 @@ CONFIG_SCHEMA = vol.Schema(
                     {cv.string: vol.All(cv.ensure_list, [cv.string])}
                 )
             }
-        )
+        ),
     },
     extra=vol.ALLOW_EXTRA,
 )
@@ -268,8 +270,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
 
+    agent_config = config.get(DOMAIN, {})
     await async_setup_default_agent(
-        hass, entity_component, config.get(DOMAIN, {}).get("intents", {})
+        hass, entity_component, config_intents=agent_config.get("intents", {})
     )
 
     async def handle_process(service: ServiceCall) -> ServiceResponse:
@@ -9,7 +9,7 @@ from contextvars import ContextVar
 from dataclasses import asdict, dataclass, field, replace
 import logging
 from pathlib import Path
-from typing import Any, Literal, TypedDict
+from typing import Any, Literal, TypedDict, cast

 import voluptuous as vol

@@ -161,7 +161,9 @@ class AssistantContent:
     role: Literal["assistant"] = field(init=False, default="assistant")
     agent_id: str
     content: str | None = None
+    thinking_content: str | None = None
     tool_calls: list[llm.ToolInput] | None = None
+    native: Any = None


 @dataclass(frozen=True)
@@ -183,7 +185,18 @@ class AssistantContentDeltaDict(TypedDict, total=False):

     role: Literal["assistant"]
     content: str | None
+    thinking_content: str | None
     tool_calls: list[llm.ToolInput] | None
+    native: Any
+
+
+class ToolResultContentDeltaDict(TypedDict, total=False):
+    """Tool result content."""
+
+    role: Literal["tool_result"]
+    tool_call_id: str
+    tool_name: str
+    tool_result: JsonObjectType


 @dataclass
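An illustrative delta sequence for the two TypedDicts above: an assistant message streamed in chunks, followed by a complete tool result. The keys follow AssistantContentDeltaDict and ToolResultContentDeltaDict; the values are made up:

    deltas = [
        {"role": "assistant"},                         # start a new assistant message
        {"thinking_content": "user wants lights on"},  # update: reasoning chunk
        {"content": "Turning on the "},                # update: text chunk
        {"content": "kitchen lights."},                # update: more text
        {                                              # complete tool result in one delta
            "role": "tool_result",
            "tool_call_id": "call-1",
            "tool_name": "HassTurnOn",
            "tool_result": {"success": True},
        },
    ]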
@@ -231,17 +244,25 @@ class ChatLog:

     @callback
     def async_add_assistant_content_without_tools(
-        self, content: AssistantContent
+        self, content: AssistantContent | ToolResultContent
     ) -> None:
-        """Add assistant content to the log."""
+        """Add assistant content to the log.
+
+        Allows assistant content without tool calls or with external tool calls only,
+        as well as tool results for the external tools.
+        """
         LOGGER.debug("Adding assistant content: %s", content)
-        if content.tool_calls is not None:
-            raise ValueError("Tool calls not allowed")
+        if (
+            isinstance(content, AssistantContent)
+            and content.tool_calls is not None
+            and any(not tool_call.external for tool_call in content.tool_calls)
+        ):
+            raise ValueError("Non-external tool calls not allowed")
         self.content.append(content)

     async def async_add_assistant_content(
         self,
-        content: AssistantContent,
+        content: AssistantContent | ToolResultContent,
         /,
         tool_call_tasks: dict[str, asyncio.Task] | None = None,
     ) -> AsyncGenerator[ToolResultContent]:
@@ -254,7 +275,11 @@ class ChatLog:
         LOGGER.debug("Adding assistant content: %s", content)
         self.content.append(content)

-        if content.tool_calls is None:
+        if (
+            not isinstance(content, AssistantContent)
+            or content.tool_calls is None
+            or all(tool_call.external for tool_call in content.tool_calls)
+        ):
             return

         if self.llm_api is None:
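A hedged sketch of the external-tool flow these checks enable: an agent may log a tool call that something outside Home Assistant will execute, then append the result later. It assumes llm.ToolInput carries the external flag the diff tests for, and that the content classes are importable from the conversation integration; neither is verified here:

    from homeassistant.components.conversation import (
        AssistantContent,
        ToolResultContent,
    )
    from homeassistant.helpers import llm


    def log_external_tool_call(chat_log, agent_id: str) -> None:
        call = llm.ToolInput(
            id="call-1",
            tool_name="send_sms",              # hypothetical external tool
            tool_args={"to": "+15551234", "body": "Door is open"},
            external=True,                     # executed outside Home Assistant
        )
        # Permitted now: only non-external tool calls raise ValueError.
        chat_log.async_add_assistant_content_without_tools(
            AssistantContent(agent_id=agent_id, tool_calls=[call])
        )
        # Later, when the external system reports back:
        chat_log.async_add_assistant_content_without_tools(
            ToolResultContent(
                agent_id=agent_id,
                tool_call_id="call-1",
                tool_name="send_sms",
                tool_result={"delivered": True},
            )
        )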
@@ -263,13 +288,16 @@ class ChatLog:
         if tool_call_tasks is None:
             tool_call_tasks = {}
         for tool_input in content.tool_calls:
-            if tool_input.id not in tool_call_tasks:
+            if tool_input.id not in tool_call_tasks and not tool_input.external:
                 tool_call_tasks[tool_input.id] = self.hass.async_create_task(
                     self.llm_api.async_call_tool(tool_input),
                     name=f"llm_tool_{tool_input.id}",
                 )

         for tool_input in content.tool_calls:
+            if tool_input.external:
+                continue
+
             LOGGER.debug(
                 "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
             )
@@ -292,7 +320,9 @@ class ChatLog:
             yield response_content

     async def async_add_delta_content_stream(
-        self, agent_id: str, stream: AsyncIterable[AssistantContentDeltaDict]
+        self,
+        agent_id: str,
+        stream: AsyncIterable[AssistantContentDeltaDict | ToolResultContentDeltaDict],
     ) -> AsyncGenerator[AssistantContent | ToolResultContent]:
         """Stream content into the chat log.

@@ -306,6 +336,8 @@ class ChatLog:
         The keys content and tool_calls will be concatenated if they appear multiple times.
         """
         current_content = ""
+        current_thinking_content = ""
+        current_native: Any = None
         current_tool_calls: list[llm.ToolInput] = []
         tool_call_tasks: dict[str, asyncio.Task] = {}

@@ -314,34 +346,54 @@ class ChatLog:

             # Indicates update to current message
             if "role" not in delta:
-                if delta_content := delta.get("content"):
+                # ToolResultContentDeltaDict will always have a role
+                assistant_delta = cast(AssistantContentDeltaDict, delta)
+                if delta_content := assistant_delta.get("content"):
                     current_content += delta_content
-                if delta_tool_calls := delta.get("tool_calls"):
-                    if self.llm_api is None:
-                        raise ValueError("No LLM API configured")
+                if delta_thinking_content := assistant_delta.get("thinking_content"):
+                    current_thinking_content += delta_thinking_content
+                if delta_native := assistant_delta.get("native"):
+                    if current_native is not None:
+                        raise RuntimeError(
+                            "Native content already set, cannot overwrite"
+                        )
+                    current_native = delta_native
+                if delta_tool_calls := assistant_delta.get("tool_calls"):
                     current_tool_calls += delta_tool_calls

                     # Start processing the tool calls as soon as we know about them
                     for tool_call in delta_tool_calls:
-                        tool_call_tasks[tool_call.id] = self.hass.async_create_task(
-                            self.llm_api.async_call_tool(tool_call),
-                            name=f"llm_tool_{tool_call.id}",
-                        )
+                        if not tool_call.external:
+                            if self.llm_api is None:
+                                raise ValueError("No LLM API configured")
+
+                            tool_call_tasks[tool_call.id] = self.hass.async_create_task(
+                                self.llm_api.async_call_tool(tool_call),
+                                name=f"llm_tool_{tool_call.id}",
+                            )
                 if self.delta_listener:
-                    self.delta_listener(self, delta)  # type: ignore[arg-type]
+                    if filtered_delta := {
+                        k: v for k, v in assistant_delta.items() if k != "native"
+                    }:
+                        # We do not want to send the native content to the listener
+                        # as it is not JSON serializable
+                        self.delta_listener(self, filtered_delta)
                 continue

             # Starting a new message

-            if delta["role"] != "assistant":
-                raise ValueError(f"Only assistant role expected. Got {delta['role']}")
-
             # Yield the previous message if it has content
-            if current_content or current_tool_calls:
-                content = AssistantContent(
+            if (
+                current_content
+                or current_thinking_content
+                or current_tool_calls
+                or current_native
+            ):
+                content: AssistantContent | ToolResultContent = AssistantContent(
                     agent_id=agent_id,
                     content=current_content or None,
+                    thinking_content=current_thinking_content or None,
                     tool_calls=current_tool_calls or None,
+                    native=current_native,
                 )
                 yield content
                 async for tool_result in self.async_add_assistant_content(
@@ -350,18 +402,51 @@ class ChatLog:
                     yield tool_result
                     if self.delta_listener:
                         self.delta_listener(self, asdict(tool_result))
+                current_content = ""
+                current_thinking_content = ""
+                current_native = None
+                current_tool_calls = []

-            current_content = delta.get("content") or ""
-            current_tool_calls = delta.get("tool_calls") or []
-
-            if self.delta_listener:
-                self.delta_listener(self, delta)  # type: ignore[arg-type]
+            if delta["role"] == "assistant":
+                current_content = delta.get("content") or ""
+                current_thinking_content = delta.get("thinking_content") or ""
+                current_tool_calls = delta.get("tool_calls") or []
+                current_native = delta.get("native")
+
+                if self.delta_listener:
+                    if filtered_delta := {
+                        k: v for k, v in delta.items() if k != "native"
+                    }:
+                        self.delta_listener(self, filtered_delta)
+            elif delta["role"] == "tool_result":
+                content = ToolResultContent(
+                    agent_id=agent_id,
+                    tool_call_id=delta["tool_call_id"],
+                    tool_name=delta["tool_name"],
+                    tool_result=delta["tool_result"],
+                )
+                yield content
+                if self.delta_listener:
+                    self.delta_listener(self, asdict(content))
+                self.async_add_assistant_content_without_tools(content)
+            else:
+                raise ValueError(
+                    "Only assistant and tool_result roles expected."
+                    f" Got {delta['role']}"
+                )

-        if current_content or current_tool_calls:
+        if (
+            current_content
+            or current_thinking_content
+            or current_tool_calls
+            or current_native
+        ):
             content = AssistantContent(
                 agent_id=agent_id,
                 content=current_content or None,
+                thinking_content=current_thinking_content or None,
                 tool_calls=current_tool_calls or None,
+                native=current_native,
             )
             yield content
             async for tool_result in self.async_add_assistant_content(
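Putting the streaming pieces together, a sketch of feeding deltas through async_add_delta_content_stream and collecting what the chat log yields; the fake stream stands in for an LLM client and the chat_log object is assumed to come from async_get_chat_log:

    from collections.abc import AsyncGenerator


    async def fake_stream() -> AsyncGenerator[dict, None]:
        """Stand-in for an LLM streaming response."""
        yield {"role": "assistant"}
        yield {"content": "Done, the lights are "}
        yield {"content": "on."}


    async def collect(chat_log, agent_id: str) -> list:
        collected = []
        # Yields AssistantContent per finished message and ToolResultContent
        # for every tool call the chat log executed along the way.
        async for content in chat_log.async_add_delta_content_stream(
            agent_id, fake_stream()
        ):
            collected.append(content)
        return collected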
@@ -14,14 +14,19 @@ import re
 import time
 from typing import IO, Any, cast

-from hassil.expression import Expression, ListReference, Sequence, TextChunk
+from hassil.expression import Expression, Group, ListReference, TextChunk
+from hassil.fuzzy import FuzzyNgramMatcher, SlotCombinationInfo
 from hassil.intents import (
+    Intent,
+    IntentData,
     Intents,
     SlotList,
     TextSlotList,
     TextSlotValue,
     WildcardSlotList,
 )
+from hassil.models import MatchEntity
+from hassil.ngram import Sqlite3NgramModel
 from hassil.recognize import (
     MISSING_ENTITY,
     RecognizeResult,
@@ -31,7 +36,15 @@ from hassil.recognize import (
 from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
 from hassil.trie import Trie
 from hassil.util import merge_dict
-from home_assistant_intents import ErrorKey, get_intents, get_languages
+from home_assistant_intents import (
+    ErrorKey,
+    FuzzyConfig,
+    FuzzyLanguageResponses,
+    get_fuzzy_config,
+    get_fuzzy_language,
+    get_intents,
+    get_languages,
+)
 import yaml

 from homeassistant import core
@@ -76,6 +89,7 @@ TRIGGER_CALLBACK_TYPE = Callable[
 ]
 METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
 METADATA_CUSTOM_FILE = "hass_custom_file"
+METADATA_FUZZY_MATCH = "hass_fuzzy_match"

 ERROR_SENTINEL = object()

@@ -94,6 +108,8 @@ class LanguageIntents:
     intent_responses: dict[str, Any]
     error_responses: dict[str, Any]
     language_variant: str | None
+    fuzzy_matcher: FuzzyNgramMatcher | None = None
+    fuzzy_responses: FuzzyLanguageResponses | None = None


 @dataclass(slots=True)
@@ -119,10 +135,13 @@ class IntentMatchingStage(Enum):
     EXPOSED_ENTITIES_ONLY = auto()
     """Match against exposed entities only."""

+    FUZZY = auto()
+    """Use fuzzy matching to guess intent."""
+
     UNEXPOSED_ENTITIES = auto()
     """Match against unexposed entities in Home Assistant."""

-    FUZZY = auto()
+    UNKNOWN_NAMES = auto()
     """Capture names that are not known to Home Assistant."""

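With the reordered stages, every stage caches its outcome (even None) under the same key, so a repeated utterance either returns the cached result or skips stages known to fail. A distilled sketch of that per-stage check; the class shapes mirror IntentCacheValue from the diff but are illustrative, not the real implementation:

    from dataclasses import dataclass
    from enum import Enum, auto
    from typing import Any


    class Stage(Enum):
        EXPOSED_ENTITIES_ONLY = auto()
        FUZZY = auto()
        UNEXPOSED_ENTITIES = auto()
        UNKNOWN_NAMES = auto()


    @dataclass
    class CacheValue:
        result: Any | None
        stage: Stage


    def check_stage(cached: CacheValue | None, stage: Stage) -> tuple[bool, Any]:
        """Return (skip_this_stage, cached_result)."""
        if cached is None:
            return False, None  # nothing cached: run the stage
        if cached.result is not None and cached.stage == stage:
            return True, cached.result  # cache hit at exactly this stage
        return True, None  # cached at another stage: this one is known to fail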
@@ -241,6 +260,10 @@ class DefaultAgent(ConversationEntity):
         # LRU cache to avoid unnecessary intent matching
         self._intent_cache = IntentCache(capacity=128)

+        # Shared configuration for fuzzy matching
+        self.fuzzy_matching = True
+        self._fuzzy_config: FuzzyConfig | None = None
+
     @property
     def supported_languages(self) -> list[str]:
         """Return a list of supported languages."""
@@ -299,7 +322,7 @@ class DefaultAgent(ConversationEntity):
             _LOGGER.warning("No intents were loaded for language: %s", language)
             return None

-        slot_lists = self._make_slot_lists()
+        slot_lists = await self._make_slot_lists()
         intent_context = self._make_intent_context(user_input)

         if self._exposed_names_trie is not None:
@@ -556,6 +579,36 @@ class DefaultAgent(ConversationEntity):
                 # Don't try matching against all entities or doing a fuzzy match
                 return None

+        # Use fuzzy matching
+        skip_fuzzy_match = False
+        if cache_value is not None:
+            if (cache_value.result is not None) and (
+                cache_value.stage == IntentMatchingStage.FUZZY
+            ):
+                _LOGGER.debug("Got cached result for fuzzy match")
+                return cache_value.result
+
+            # Continue with matching, but we know we won't succeed for fuzzy
+            # match.
+            skip_fuzzy_match = True
+
+        if (not skip_fuzzy_match) and self.fuzzy_matching:
+            start_time = time.monotonic()
+            fuzzy_result = self._recognize_fuzzy(lang_intents, user_input)
+
+            # Update cache
+            self._intent_cache.put(
+                cache_key,
+                IntentCacheValue(result=fuzzy_result, stage=IntentMatchingStage.FUZZY),
+            )
+
+            _LOGGER.debug(
+                "Did fuzzy match in %s second(s)", time.monotonic() - start_time
+            )
+
+            if fuzzy_result is not None:
+                return fuzzy_result
+
         # Try again with all entities (including unexposed)
         skip_unexposed_entities_match = False
         if cache_value is not None:
@@ -601,102 +654,160 @@ class DefaultAgent(ConversationEntity):
                 # This should fail the intent handling phase (async_match_targets).
                 return strict_result

-        # Try again with missing entities enabled
-        skip_fuzzy_match = False
+        # Check unknown names
+        skip_unknown_names = False
         if cache_value is not None:
             if (cache_value.result is not None) and (
-                cache_value.stage == IntentMatchingStage.FUZZY
+                cache_value.stage == IntentMatchingStage.UNKNOWN_NAMES
             ):
-                _LOGGER.debug("Got cached result for fuzzy match")
+                _LOGGER.debug("Got cached result for unknown names")
                 return cache_value.result

-            # We know we won't succeed for fuzzy matching.
-            skip_fuzzy_match = True
+            skip_unknown_names = True

         maybe_result: RecognizeResult | None = None
-        if not skip_fuzzy_match:
+        if not skip_unknown_names:
             start_time = time.monotonic()
-            best_num_matched_entities = 0
-            best_num_unmatched_entities = 0
-            best_num_unmatched_ranges = 0
-            for result in recognize_all(
-                user_input.text,
-                lang_intents.intents,
-                slot_lists=slot_lists,
-                intent_context=intent_context,
-                allow_unmatched_entities=True,
-            ):
-                if result.text_chunks_matched < 1:
-                    # Skip results that don't match any literal text
-                    continue
-
-                # Don't count missing entities that couldn't be filled from context
-                num_matched_entities = 0
-                for matched_entity in result.entities_list:
-                    if matched_entity.name not in result.unmatched_entities:
-                        num_matched_entities += 1
-
-                num_unmatched_entities = 0
-                num_unmatched_ranges = 0
-                for unmatched_entity in result.unmatched_entities_list:
-                    if isinstance(unmatched_entity, UnmatchedTextEntity):
-                        if unmatched_entity.text != MISSING_ENTITY:
-                            num_unmatched_entities += 1
-                    elif isinstance(unmatched_entity, UnmatchedRangeEntity):
-                        num_unmatched_ranges += 1
-                        num_unmatched_entities += 1
-                    else:
-                        num_unmatched_entities += 1
-
-                if (
-                    (maybe_result is None)  # first result
-                    or (
-                        # More literal text matched
-                        result.text_chunks_matched > maybe_result.text_chunks_matched
-                    )
-                    or (
-                        # More entities matched
-                        num_matched_entities > best_num_matched_entities
-                    )
-                    or (
-                        # Fewer unmatched entities
-                        (num_matched_entities == best_num_matched_entities)
-                        and (num_unmatched_entities < best_num_unmatched_entities)
-                    )
-                    or (
-                        # Prefer unmatched ranges
-                        (num_matched_entities == best_num_matched_entities)
-                        and (num_unmatched_entities == best_num_unmatched_entities)
-                        and (num_unmatched_ranges > best_num_unmatched_ranges)
-                    )
-                    or (
-                        # Prefer match failures with entities
-                        (result.text_chunks_matched == maybe_result.text_chunks_matched)
-                        and (num_unmatched_entities == best_num_unmatched_entities)
-                        and (num_unmatched_ranges == best_num_unmatched_ranges)
-                        and (
-                            ("name" in result.entities)
-                            or ("name" in result.unmatched_entities)
-                        )
-                    )
-                ):
-                    maybe_result = result
-                    best_num_matched_entities = num_matched_entities
-                    best_num_unmatched_entities = num_unmatched_entities
-                    best_num_unmatched_ranges = num_unmatched_ranges
+            maybe_result = self._recognize_unknown_names(
+                lang_intents, user_input, slot_lists, intent_context
+            )

             # Update cache
             self._intent_cache.put(
                 cache_key,
-                IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY),
+                IntentCacheValue(
+                    result=maybe_result, stage=IntentMatchingStage.UNKNOWN_NAMES
+                ),
             )

             _LOGGER.debug(
-                "Did fuzzy match in %s second(s)", time.monotonic() - start_time
+                "Did unknown names match in %s second(s)", time.monotonic() - start_time
             )

         return maybe_result

+    def _recognize_fuzzy(
+        self, lang_intents: LanguageIntents, user_input: ConversationInput
+    ) -> RecognizeResult | None:
+        """Return fuzzy recognition from hassil."""
+        if lang_intents.fuzzy_matcher is None:
+            return None
+
+        fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
+        if fuzzy_result is None:
+            return None
+
+        response = "default"
+        if lang_intents.fuzzy_responses:
+            domain = ""  # no domain
+            if "name" in fuzzy_result.slots:
+                domain = fuzzy_result.name_domain
+            elif "domain" in fuzzy_result.slots:
+                domain = fuzzy_result.slots["domain"].value
+
+            slot_combo = tuple(sorted(fuzzy_result.slots))
+            if (
+                intent_responses := lang_intents.fuzzy_responses.get(
+                    fuzzy_result.intent_name
+                )
+            ) and (combo_responses := intent_responses.get(slot_combo)):
+                response = combo_responses.get(domain, response)
+
+        entities = [
+            MatchEntity(name=slot_name, value=slot_value.value, text=slot_value.text)
+            for slot_name, slot_value in fuzzy_result.slots.items()
+        ]
+
+        return RecognizeResult(
+            intent=Intent(name=fuzzy_result.intent_name),
+            intent_data=IntentData(sentence_texts=[]),
+            intent_metadata={METADATA_FUZZY_MATCH: True},
+            entities={entity.name: entity for entity in entities},
+            entities_list=entities,
+            response=response,
+        )
+
+    def _recognize_unknown_names(
+        self,
+        lang_intents: LanguageIntents,
+        user_input: ConversationInput,
+        slot_lists: dict[str, SlotList],
+        intent_context: dict[str, Any] | None,
+    ) -> RecognizeResult | None:
+        """Return result with unknown names for an error message."""
+        maybe_result: RecognizeResult | None = None
+
+        best_num_matched_entities = 0
+        best_num_unmatched_entities = 0
+        best_num_unmatched_ranges = 0
+        for result in recognize_all(
+            user_input.text,
+            lang_intents.intents,
+            slot_lists=slot_lists,
+            intent_context=intent_context,
+            allow_unmatched_entities=True,
+        ):
+            if result.text_chunks_matched < 1:
+                # Skip results that don't match any literal text
+                continue
+
+            # Don't count missing entities that couldn't be filled from context
+            num_matched_entities = 0
+            for matched_entity in result.entities_list:
+                if matched_entity.name not in result.unmatched_entities:
+                    num_matched_entities += 1
+
+            num_unmatched_entities = 0
+            num_unmatched_ranges = 0
+            for unmatched_entity in result.unmatched_entities_list:
+                if isinstance(unmatched_entity, UnmatchedTextEntity):
+                    if unmatched_entity.text != MISSING_ENTITY:
+                        num_unmatched_entities += 1
+                elif isinstance(unmatched_entity, UnmatchedRangeEntity):
+                    num_unmatched_ranges += 1
+                    num_unmatched_entities += 1
+                else:
+                    num_unmatched_entities += 1
+
+            if (
+                (maybe_result is None)  # first result
+                or (
+                    # More literal text matched
+                    result.text_chunks_matched > maybe_result.text_chunks_matched
+                )
+                or (
+                    # More entities matched
+                    num_matched_entities > best_num_matched_entities
+                )
+                or (
+                    # Fewer unmatched entities
+                    (num_matched_entities == best_num_matched_entities)
+                    and (num_unmatched_entities < best_num_unmatched_entities)
+                )
+                or (
+                    # Prefer unmatched ranges
+                    (num_matched_entities == best_num_matched_entities)
+                    and (num_unmatched_entities == best_num_unmatched_entities)
+                    and (num_unmatched_ranges > best_num_unmatched_ranges)
+                )
+                or (
+                    # Prefer match failures with entities
+                    (result.text_chunks_matched == maybe_result.text_chunks_matched)
+                    and (num_unmatched_entities == best_num_unmatched_entities)
+                    and (num_unmatched_ranges == best_num_unmatched_ranges)
+                    and (
+                        ("name" in result.entities)
+                        or ("name" in result.unmatched_entities)
+                    )
+                )
+            ):
+                maybe_result = result
+                best_num_matched_entities = num_matched_entities
+                best_num_unmatched_entities = num_unmatched_entities
+                best_num_unmatched_ranges = num_unmatched_ranges
+
+        return maybe_result
+
     def _get_unexposed_entity_names(self, text: str) -> TextSlotList:
         """Get filtered slot list with unexposed entity names in Home Assistant."""
         if self._unexposed_names_trie is None:
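The loop moved into _recognize_unknown_names keeps the best error candidate by a fixed preference order. A distilled comparison over precomputed counts (the final "prefer match failures with entities" tie-break from the diff is omitted here for brevity):

    def is_better(
        candidate: tuple[int, int, int, int],
        best: tuple[int, int, int, int] | None,
    ) -> bool:
        """Each tuple is (text_chunks_matched, matched, unmatched, unmatched_ranges)."""
        if best is None:
            return True  # first result always wins
        text, matched, unmatched, ranges = candidate
        b_text, b_matched, b_unmatched, b_ranges = best
        if text > b_text:
            return True  # more literal text matched
        if matched > b_matched:
            return True  # more entities matched
        if matched == b_matched and unmatched < b_unmatched:
            return True  # fewer unmatched entities
        if matched == b_matched and unmatched == b_unmatched and ranges > b_ranges:
            return True  # prefer unmatched ranges
        return False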
@@ -851,7 +962,7 @@ class DefaultAgent(ConversationEntity):
         if lang_intents is None:
             return

-        self._make_slot_lists()
+        await self._make_slot_lists()

     async def async_get_or_load_intents(self, language: str) -> LanguageIntents | None:
         """Load all intents of a language with lock."""
@@ -1002,12 +1113,85 @@ class DefaultAgent(ConversationEntity):
         intent_responses = responses_dict.get("intents", {})
         error_responses = responses_dict.get("errors", {})

+        if not self.fuzzy_matching:
+            _LOGGER.debug("Fuzzy matching is disabled")
+            return LanguageIntents(
+                intents,
+                intents_dict,
+                intent_responses,
+                error_responses,
+                language_variant,
+            )
+
+        # Load fuzzy
+        fuzzy_info = get_fuzzy_language(language_variant, json_load=json_load)
+        if fuzzy_info is None:
+            _LOGGER.debug(
+                "Fuzzy matching not available for language: %s", language_variant
+            )
+            return LanguageIntents(
+                intents,
+                intents_dict,
+                intent_responses,
+                error_responses,
+                language_variant,
+            )
+
+        if self._fuzzy_config is None:
+            # Load shared config
+            self._fuzzy_config = get_fuzzy_config(json_load=json_load)
+            _LOGGER.debug("Loaded shared fuzzy matching config")
+
+        assert self._fuzzy_config is not None
+
+        fuzzy_matcher: FuzzyNgramMatcher | None = None
+        fuzzy_responses: FuzzyLanguageResponses | None = None
+
+        start_time = time.monotonic()
+        fuzzy_responses = fuzzy_info.responses
+        fuzzy_matcher = FuzzyNgramMatcher(
+            intents=intents,
+            intent_models={
+                intent_name: Sqlite3NgramModel(
+                    order=fuzzy_model.order,
+                    words={
+                        word: str(word_id)
+                        for word, word_id in fuzzy_model.words.items()
+                    },
+                    database_path=fuzzy_model.database_path,
+                )
+                for intent_name, fuzzy_model in fuzzy_info.ngram_models.items()
+            },
+            intent_slot_list_names=self._fuzzy_config.slot_list_names,
+            slot_combinations={
+                intent_name: {
+                    combo_key: [
+                        SlotCombinationInfo(
+                            name_domains=(set(name_domains) if name_domains else None)
+                        )
+                    ]
+                    for combo_key, name_domains in intent_combos.items()
+                }
+                for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
+            },
+            domain_keywords=fuzzy_info.domain_keywords,
+            stop_words=fuzzy_info.stop_words,
+        )
+        _LOGGER.debug(
+            "Loaded fuzzy matcher in %s second(s): language=%s, intents=%s",
+            time.monotonic() - start_time,
+            language_variant,
+            sorted(fuzzy_matcher.intent_models.keys()),
+        )
+
         return LanguageIntents(
             intents,
             intents_dict,
             intent_responses,
             error_responses,
             language_variant,
+            fuzzy_matcher=fuzzy_matcher,
+            fuzzy_responses=fuzzy_responses,
         )

     @core.callback
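The fuzzy data is loaded lazily: the per-language ngram models and responses on first intent load, and the shared FuzzyConfig once per agent. Since fuzzy_matching defaults to True, opting out is a single attribute flip before intents load; a hedged sketch, assuming an agent instance is at hand:

    # fuzzy_matching is the public attribute added in this diff.
    async def load_without_fuzzy(agent) -> None:
        agent.fuzzy_matching = False  # skip ngram model load and the FUZZY stage
        # Languages loaded from here on get LanguageIntents with
        # fuzzy_matcher=None and fuzzy_responses=None.
        await agent.async_get_or_load_intents("en")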
@@ -1027,8 +1211,7 @@ class DefaultAgent(ConversationEntity):
         # Slot lists have changed, so we must clear the cache
         self._intent_cache.clear()

-    @core.callback
-    def _make_slot_lists(self) -> dict[str, SlotList]:
+    async def _make_slot_lists(self) -> dict[str, SlotList]:
         """Create slot lists with areas and entity names/aliases."""
         if self._slot_lists is not None:
             return self._slot_lists
@@ -1089,6 +1272,10 @@ class DefaultAgent(ConversationEntity):
             "floor": TextSlotList.from_tuples(floor_names, allow_template=False),
         }

+        # Reload fuzzy matchers with new slot lists
+        if self.fuzzy_matching:
+            await self.hass.async_add_executor_job(self._load_fuzzy_matchers)
+
         self._listen_clear_slot_list()

         _LOGGER.debug(
@@ -1098,6 +1285,25 @@ class DefaultAgent(ConversationEntity):

         return self._slot_lists

+    def _load_fuzzy_matchers(self) -> None:
+        """Reload fuzzy matchers for all loaded languages."""
+        for lang_intents in self._lang_intents.values():
+            if (not isinstance(lang_intents, LanguageIntents)) or (
+                lang_intents.fuzzy_matcher is None
+            ):
+                continue
+
+            lang_matcher = lang_intents.fuzzy_matcher
+            lang_intents.fuzzy_matcher = FuzzyNgramMatcher(
+                intents=lang_matcher.intents,
+                intent_models=lang_matcher.intent_models,
+                intent_slot_list_names=lang_matcher.intent_slot_list_names,
+                slot_combinations=lang_matcher.slot_combinations,
+                domain_keywords=lang_matcher.domain_keywords,
+                stop_words=lang_matcher.stop_words,
+                slot_lists=self._slot_lists,
+            )
+
     def _make_intent_context(
         self, user_input: ConversationInput
     ) -> dict[str, Any] | None:
@@ -1183,7 +1389,7 @@ class DefaultAgent(ConversationEntity):
     for trigger_intent in trigger_intents.intents.values():
         for intent_data in trigger_intent.data:
             for sentence in intent_data.sentences:
-                _collect_list_references(sentence, wildcard_names)
+                _collect_list_references(sentence.expression, wildcard_names)

     for wildcard_name in wildcard_names:
         trigger_intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -1520,11 +1726,9 @@ def _get_match_error_response(

 def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     """Collect list reference names recursively."""
-    if isinstance(expression, Sequence):
-        seq: Sequence = expression
-        for item in seq.items:
+    if isinstance(expression, Group):
+        for item in expression.items:
             _collect_list_references(item, list_names)
     elif isinstance(expression, ListReference):
         # {list}
-        list_ref: ListReference = expression
-        list_names.add(list_ref.slot_name)
+        list_names.add(expression.slot_name)
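The rewritten traversal needs only Group.items and ListReference.slot_name, so the shape of the recursion can be shown with stand-in classes (hassil's real classes replace these):

    from dataclasses import dataclass, field


    class Expression:
        """Stand-in for hassil.expression.Expression."""


    @dataclass
    class ListReference(Expression):
        slot_name: str


    @dataclass
    class Group(Expression):
        items: list[Expression] = field(default_factory=list)


    def collect_list_references(expression: Expression, list_names: set[str]) -> None:
        """Collect {list} reference names recursively."""
        if isinstance(expression, Group):
            for item in expression.items:
                collect_list_references(item, list_names)
        elif isinstance(expression, ListReference):
            list_names.add(expression.slot_name)


    names: set[str] = set()
    collect_list_references(
        Group([ListReference("name"), Group([ListReference("area")])]), names
    )
    print(sorted(names))  # ['area', 'name']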
Some files were not shown because too many files have changed in this diff.